Commit 65e549c

ref(core): Add more specific event mechanisms and span origins to openAiIntegration (#17288)
Also changes the `sentry.origin` attribute from `auto.function.openai` to `auto.ai.openai`, since `ai` is the widely used category for these spans (the attribute was added initially via #17288). `mechanism.type` now follows the same trace origin pattern. ref #17212, ref #17252
1 parent: af69b45
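
For context, here is a minimal TypeScript sketch of the convention the updated tests assert; it is not the integration's actual implementation, and the wrapper name, span name, and model value are illustrative assumptions. Spans created by the instrumentation carry the origin `auto.ai.openai`, and errors it captures report a `mechanism.type` that mirrors that origin.

import * as Sentry from '@sentry/node';

// Illustrative sketch only: a gen_ai.chat span whose origin is
// 'auto.ai.openai', with errors captured using a matching mechanism.type.
// The wrapper name and attribute values are assumptions for this example.
async function instrumentedChatCompletion<T>(run: () => Promise<T>): Promise<T> {
  return Sentry.startSpan(
    {
      name: 'chat gpt-3.5-turbo',
      op: 'gen_ai.chat',
      attributes: {
        'sentry.origin': 'auto.ai.openai',
        'gen_ai.system': 'openai',
        'gen_ai.request.model': 'gpt-3.5-turbo',
      },
    },
    async () => {
      try {
        return await run();
      } catch (error) {
        // mechanism.type mirrors the span origin instead of a generic value.
        Sentry.captureException(error, {
          mechanism: { type: 'auto.ai.openai', handled: false },
        });
        throw error;
      }
    },
  );
}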

5 files changed: +59 −45 lines changed
dev-packages/cloudflare-integration-tests/suites/tracing/openai/test.ts

Lines changed: 2 additions & 1 deletion
@@ -19,6 +19,7 @@ it('traces a basic chat completion request', async () => {
         data: expect.objectContaining({
           'gen_ai.operation.name': 'chat',
           'sentry.op': 'gen_ai.chat',
+          'sentry.origin': 'auto.ai.openai',
           'gen_ai.system': 'openai',
           'gen_ai.request.model': 'gpt-3.5-turbo',
           'gen_ai.request.temperature': 0.7,
@@ -31,7 +32,7 @@ it('traces a basic chat completion request', async () => {
         }),
         description: 'chat gpt-3.5-turbo',
         op: 'gen_ai.chat',
-        origin: 'auto.function.openai',
+        origin: 'auto.ai.openai',
       }),
     ]),
   );

dev-packages/node-integration-tests/suites/tracing/openai/openai-tool-calls/test.ts

Lines changed: 16 additions & 16 deletions
@@ -65,7 +65,7 @@ describe('OpenAI Tool Calls integration', () => {
           data: {
             'gen_ai.operation.name': 'chat',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.available_tools': WEATHER_TOOL_DEFINITION,
@@ -83,15 +83,15 @@ describe('OpenAI Tool Calls integration', () => {
           },
           description: 'chat gpt-4',
           op: 'gen_ai.chat',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Second span - chat completion with tools and streaming
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'chat',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.stream': true,
@@ -111,15 +111,15 @@ describe('OpenAI Tool Calls integration', () => {
           },
           description: 'chat gpt-4 stream-response',
           op: 'gen_ai.chat',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Third span - responses API with tools (non-streaming)
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'responses',
             'sentry.op': 'gen_ai.responses',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.available_tools': WEATHER_TOOL_DEFINITION,
@@ -137,15 +137,15 @@ describe('OpenAI Tool Calls integration', () => {
           },
           description: 'responses gpt-4',
           op: 'gen_ai.responses',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Fourth span - responses API with tools and streaming
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'responses',
             'sentry.op': 'gen_ai.responses',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.stream': true,
@@ -165,7 +165,7 @@ describe('OpenAI Tool Calls integration', () => {
           },
           description: 'responses gpt-4 stream-response',
           op: 'gen_ai.responses',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
       ]),
@@ -179,7 +179,7 @@ describe('OpenAI Tool Calls integration', () => {
           data: {
             'gen_ai.operation.name': 'chat',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.messages': '[{"role":"user","content":"What is the weather like in Paris today?"}]',
@@ -200,15 +200,15 @@ describe('OpenAI Tool Calls integration', () => {
           },
           description: 'chat gpt-4',
           op: 'gen_ai.chat',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Second span - chat completion with tools and streaming with PII
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'chat',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.stream': true,
@@ -230,15 +230,15 @@ describe('OpenAI Tool Calls integration', () => {
           },
           description: 'chat gpt-4 stream-response',
           op: 'gen_ai.chat',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Third span - responses API with tools (non-streaming) with PII
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'responses',
             'sentry.op': 'gen_ai.responses',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.messages': '[{"role":"user","content":"What is the weather like in Paris today?"}]',
@@ -258,15 +258,15 @@ describe('OpenAI Tool Calls integration', () => {
           },
           description: 'responses gpt-4',
           op: 'gen_ai.responses',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Fourth span - responses API with tools and streaming with PII
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'responses',
             'sentry.op': 'gen_ai.responses',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.stream': true,
@@ -288,7 +288,7 @@ describe('OpenAI Tool Calls integration', () => {
           },
           description: 'responses gpt-4 stream-response',
           op: 'gen_ai.responses',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
       ]),

dev-packages/node-integration-tests/suites/tracing/openai/test.ts

Lines changed: 26 additions & 26 deletions
@@ -14,7 +14,7 @@ describe('OpenAI integration', () => {
           data: {
             'gen_ai.operation.name': 'chat',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-3.5-turbo',
             'gen_ai.request.temperature': 0.7,
@@ -32,15 +32,15 @@ describe('OpenAI integration', () => {
           },
           description: 'chat gpt-3.5-turbo',
           op: 'gen_ai.chat',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Second span - responses API
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'responses',
             'sentry.op': 'gen_ai.responses',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-3.5-turbo',
             'gen_ai.response.model': 'gpt-3.5-turbo',
@@ -57,29 +57,29 @@ describe('OpenAI integration', () => {
           },
           description: 'responses gpt-3.5-turbo',
           op: 'gen_ai.responses',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Third span - error handling
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'chat',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'error-model',
           },
           description: 'chat error-model',
           op: 'gen_ai.chat',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'unknown_error',
         }),
         // Fourth span - chat completions streaming
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'chat',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.temperature': 0.8,
@@ -99,15 +99,15 @@ describe('OpenAI integration', () => {
           },
           description: 'chat gpt-4 stream-response',
           op: 'gen_ai.chat',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Fifth span - responses API streaming
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'responses',
             'sentry.op': 'gen_ai.responses',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.stream': true,
@@ -126,7 +126,7 @@ describe('OpenAI integration', () => {
           },
           description: 'responses gpt-4 stream-response',
           op: 'gen_ai.responses',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Sixth span - error handling in streaming context
@@ -137,11 +137,11 @@ describe('OpenAI integration', () => {
             'gen_ai.request.stream': true,
             'gen_ai.system': 'openai',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
           },
           description: 'chat error-model stream-response',
           op: 'gen_ai.chat',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'internal_error',
         }),
       ]),
@@ -155,7 +155,7 @@ describe('OpenAI integration', () => {
           data: {
             'gen_ai.operation.name': 'chat',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-3.5-turbo',
             'gen_ai.request.temperature': 0.7,
@@ -176,15 +176,15 @@ describe('OpenAI integration', () => {
           },
           description: 'chat gpt-3.5-turbo',
           op: 'gen_ai.chat',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Second span - responses API with PII
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'responses',
             'sentry.op': 'gen_ai.responses',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-3.5-turbo',
             'gen_ai.request.messages': '"Translate this to French: Hello"',
@@ -203,30 +203,30 @@ describe('OpenAI integration', () => {
           },
           description: 'responses gpt-3.5-turbo',
           op: 'gen_ai.responses',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Third span - error handling with PII
         expect.objectContaining({
           data: {
             'gen_ai.operation.name': 'chat',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'error-model',
             'gen_ai.request.messages': '[{"role":"user","content":"This will fail"}]',
           },
           description: 'chat error-model',
           op: 'gen_ai.chat',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'unknown_error',
         }),
         // Fourth span - chat completions streaming with PII
         expect.objectContaining({
           data: expect.objectContaining({
             'gen_ai.operation.name': 'chat',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.temperature': 0.8,
@@ -249,15 +249,15 @@ describe('OpenAI integration', () => {
           }),
           description: 'chat gpt-4 stream-response',
           op: 'gen_ai.chat',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Fifth span - responses API streaming with PII
         expect.objectContaining({
           data: expect.objectContaining({
             'gen_ai.operation.name': 'responses',
             'sentry.op': 'gen_ai.responses',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-4',
             'gen_ai.request.stream': true,
@@ -278,7 +278,7 @@ describe('OpenAI integration', () => {
           }),
           description: 'responses gpt-4 stream-response',
           op: 'gen_ai.responses',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         }),
         // Sixth span - error handling in streaming context with PII
@@ -290,11 +290,11 @@ describe('OpenAI integration', () => {
             'gen_ai.request.messages': '[{"role":"user","content":"This will fail"}]',
             'gen_ai.system': 'openai',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
           },
           description: 'chat error-model stream-response',
           op: 'gen_ai.chat',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'internal_error',
         }),
       ]),
@@ -370,7 +370,7 @@ describe('OpenAI integration', () => {
           data: {
             'gen_ai.operation.name': 'chat',
             'sentry.op': 'gen_ai.chat',
-            'sentry.origin': 'auto.function.openai',
+            'sentry.origin': 'auto.ai.openai',
             'gen_ai.system': 'openai',
             'gen_ai.request.model': 'gpt-3.5-turbo',
             'gen_ai.request.temperature': 0.7,
@@ -387,7 +387,7 @@ describe('OpenAI integration', () => {
             'openai.usage.prompt_tokens': 10,
           },
           op: 'gen_ai.chat',
-          origin: 'auto.function.openai',
+          origin: 'auto.ai.openai',
           status: 'ok',
         },
       },
