Commit 4a43a59: fix tests
1 parent: c3e94e7

File tree: 1 file changed (+24, -24 lines)
  • dev-packages/node-integration-tests/suites/tracing/openai


dev-packages/node-integration-tests/suites/tracing/openai/test.ts
24 additions, 24 deletions
@@ -14,7 +14,7 @@ describe('OpenAI integration', () => {
         data: {
           'gen_ai.operation.name': 'chat',
           'sentry.op': 'gen_ai.chat',
-          'sentry.origin': 'manual',
+          'sentry.origin': 'auto.ai.openai',
           'gen_ai.system': 'openai',
           'gen_ai.request.model': 'gpt-3.5-turbo',
           'gen_ai.request.temperature': 0.7,
@@ -32,15 +32,15 @@ describe('OpenAI integration', () => {
         },
         description: 'chat gpt-3.5-turbo',
         op: 'gen_ai.chat',
-        origin: 'manual',
+        origin: 'auto.ai.openai',
         status: 'ok',
       }),
       // Second span - responses API
       expect.objectContaining({
         data: {
           'gen_ai.operation.name': 'responses',
           'sentry.op': 'gen_ai.responses',
-          'sentry.origin': 'manual',
+          'sentry.origin': 'auto.ai.openai',
           'gen_ai.system': 'openai',
           'gen_ai.request.model': 'gpt-3.5-turbo',
           'gen_ai.response.model': 'gpt-3.5-turbo',
@@ -57,29 +57,29 @@ describe('OpenAI integration', () => {
         },
         description: 'responses gpt-3.5-turbo',
         op: 'gen_ai.responses',
-        origin: 'manual',
+        origin: 'auto.ai.openai',
         status: 'ok',
       }),
       // Third span - error handling
       expect.objectContaining({
         data: {
           'gen_ai.operation.name': 'chat',
           'sentry.op': 'gen_ai.chat',
-          'sentry.origin': 'manual',
+          'sentry.origin': 'auto.ai.openai',
           'gen_ai.system': 'openai',
           'gen_ai.request.model': 'error-model',
         },
         description: 'chat error-model',
         op: 'gen_ai.chat',
-        origin: 'manual',
+        origin: 'auto.ai.openai',
         status: 'unknown_error',
       }),
       // Fourth span - chat completions streaming
       expect.objectContaining({
         data: {
           'gen_ai.operation.name': 'chat',
           'sentry.op': 'gen_ai.chat',
-          'sentry.origin': 'manual',
+          'sentry.origin': 'auto.ai.openai',
           'gen_ai.system': 'openai',
           'gen_ai.request.model': 'gpt-4',
           'gen_ai.request.temperature': 0.8,
@@ -99,15 +99,15 @@ describe('OpenAI integration', () => {
         },
         description: 'chat gpt-4 stream-response',
         op: 'gen_ai.chat',
-        origin: 'manual',
+        origin: 'auto.ai.openai',
         status: 'ok',
       }),
       // Fifth span - responses API streaming
       expect.objectContaining({
         data: {
           'gen_ai.operation.name': 'responses',
           'sentry.op': 'gen_ai.responses',
-          'sentry.origin': 'manual',
+          'sentry.origin': 'auto.ai.openai',
           'gen_ai.system': 'openai',
           'gen_ai.request.model': 'gpt-4',
           'gen_ai.request.stream': true,
@@ -126,7 +126,7 @@ describe('OpenAI integration', () => {
         },
         description: 'responses gpt-4 stream-response',
         op: 'gen_ai.responses',
-        origin: 'manual',
+        origin: 'auto.ai.openai',
         status: 'ok',
       }),
       // Sixth span - error handling in streaming context
@@ -137,11 +137,11 @@ describe('OpenAI integration', () => {
           'gen_ai.request.stream': true,
           'gen_ai.system': 'openai',
           'sentry.op': 'gen_ai.chat',
-          'sentry.origin': 'manual',
+          'sentry.origin': 'auto.ai.openai',
         },
         description: 'chat error-model stream-response',
         op: 'gen_ai.chat',
-        origin: 'manual',
+        origin: 'auto.ai.openai',
         status: 'internal_error',
       }),
     ]),
@@ -155,7 +155,7 @@ describe('OpenAI integration', () => {
         data: {
           'gen_ai.operation.name': 'chat',
           'sentry.op': 'gen_ai.chat',
-          'sentry.origin': 'manual',
+          'sentry.origin': 'auto.ai.openai',
           'gen_ai.system': 'openai',
           'gen_ai.request.model': 'gpt-3.5-turbo',
           'gen_ai.request.temperature': 0.7,
@@ -176,15 +176,15 @@ describe('OpenAI integration', () => {
         },
         description: 'chat gpt-3.5-turbo',
         op: 'gen_ai.chat',
-        origin: 'manual',
+        origin: 'auto.ai.openai',
         status: 'ok',
       }),
       // Second span - responses API with PII
       expect.objectContaining({
         data: {
           'gen_ai.operation.name': 'responses',
           'sentry.op': 'gen_ai.responses',
-          'sentry.origin': 'manual',
+          'sentry.origin': 'auto.ai.openai',
           'gen_ai.system': 'openai',
           'gen_ai.request.model': 'gpt-3.5-turbo',
           'gen_ai.request.messages': '"Translate this to French: Hello"',
@@ -203,30 +203,30 @@ describe('OpenAI integration', () => {
         },
         description: 'responses gpt-3.5-turbo',
         op: 'gen_ai.responses',
-        origin: 'manual',
+        origin: 'auto.ai.openai',
         status: 'ok',
       }),
       // Third span - error handling with PII
       expect.objectContaining({
         data: {
           'gen_ai.operation.name': 'chat',
           'sentry.op': 'gen_ai.chat',
-          'sentry.origin': 'manual',
+          'sentry.origin': 'auto.ai.openai',
           'gen_ai.system': 'openai',
           'gen_ai.request.model': 'error-model',
           'gen_ai.request.messages': '[{"role":"user","content":"This will fail"}]',
         },
         description: 'chat error-model',
         op: 'gen_ai.chat',
-        origin: 'manual',
+        origin: 'auto.ai.openai',
         status: 'unknown_error',
       }),
       // Fourth span - chat completions streaming with PII
       expect.objectContaining({
         data: expect.objectContaining({
           'gen_ai.operation.name': 'chat',
           'sentry.op': 'gen_ai.chat',
-          'sentry.origin': 'manual',
+          'sentry.origin': 'auto.ai.openai',
           'gen_ai.system': 'openai',
           'gen_ai.request.model': 'gpt-4',
           'gen_ai.request.temperature': 0.8,
@@ -249,15 +249,15 @@ describe('OpenAI integration', () => {
         }),
         description: 'chat gpt-4 stream-response',
         op: 'gen_ai.chat',
-        origin: 'manual',
+        origin: 'auto.ai.openai',
         status: 'ok',
       }),
       // Fifth span - responses API streaming with PII
       expect.objectContaining({
         data: expect.objectContaining({
           'gen_ai.operation.name': 'responses',
           'sentry.op': 'gen_ai.responses',
-          'sentry.origin': 'manual',
+          'sentry.origin': 'auto.ai.openai',
           'gen_ai.system': 'openai',
           'gen_ai.request.model': 'gpt-4',
           'gen_ai.request.stream': true,
@@ -278,7 +278,7 @@ describe('OpenAI integration', () => {
         }),
         description: 'responses gpt-4 stream-response',
         op: 'gen_ai.responses',
-        origin: 'manual',
+        origin: 'auto.ai.openai',
         status: 'ok',
       }),
       // Sixth span - error handling in streaming context with PII
@@ -290,11 +290,11 @@ describe('OpenAI integration', () => {
           'gen_ai.request.messages': '[{"role":"user","content":"This will fail"}]',
           'gen_ai.system': 'openai',
           'sentry.op': 'gen_ai.chat',
-          'sentry.origin': 'manual',
+          'sentry.origin': 'auto.ai.openai',
         },
         description: 'chat error-model stream-response',
         op: 'gen_ai.chat',
-        origin: 'manual',
+        origin: 'auto.ai.openai',
         status: 'internal_error',
       }),
     ]),
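What the updated assertions encode: every expected span now carries the origin 'auto.ai.openai' instead of the default 'manual', i.e. the spans are attributed to the OpenAI auto-instrumentation rather than to manually started spans. A minimal sketch of that distinction, assuming the OpenAI integration is enabled via Sentry.init; the setup below is illustrative only, not the actual test app in this suite:

import * as Sentry from '@sentry/node';
import OpenAI from 'openai';

// Illustrative setup; the real integration test wires this up differently.
Sentry.init({ dsn: process.env.SENTRY_DSN, tracesSampleRate: 1.0 });

const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

await Sentry.startSpan({ name: 'handler', op: 'function' }, async () => {
  // This outer span was started by hand, so its origin is the default 'manual'.
  // The gen_ai.chat span wrapped around the call below by the OpenAI
  // instrumentation is what the updated assertions target:
  //   op: 'gen_ai.chat', 'sentry.origin': 'auto.ai.openai'
  await client.chat.completions.create({
    model: 'gpt-3.5-turbo',
    messages: [{ role: 'user', content: 'Hello' }],
  });
});

The chat, responses-API, streaming, and error cases in the diff differ only in their op, description, and status fields; the origin expectation changes the same way in all of them.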
