@@ -13,7 +13,7 @@ describe('Vercel AI integration (V5)', () => {
       // First span - no telemetry config, should enable telemetry but not record inputs/outputs when sendDefaultPii: false
       expect.objectContaining({
         data: {
-          'vercel.ai.model.id': 'mock-model-id',
+          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.model.provider': 'mock-provider',
           'vercel.ai.operationId': 'ai.generateText',
           'vercel.ai.pipeline.name': 'generateText',
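For context, spans like this come from a `generateText` call against a mocked model. A minimal sketch of that setup, assuming AI SDK v5's `MockLanguageModelV2` test helper (the exact `doGenerate` result shape may vary between v5 releases):

```ts
import { generateText } from 'ai';
import { MockLanguageModelV2 } from 'ai/test';

// Mock model that resolves immediately; the integration derives
// 'gen_ai.request.model' from the model id of the model passed in.
const model = new MockLanguageModelV2({
  doGenerate: async () => ({
    finishReason: 'stop',
    usage: { inputTokens: 10, outputTokens: 20, totalTokens: 30 },
    content: [{ type: 'text', text: 'First span here!' }],
    warnings: [],
  }),
});

await generateText({ model, prompt: 'Where is the first span?' });
```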
@@ -41,10 +41,9 @@ describe('Vercel AI integration (V5)', () => {
           'operation.name': 'ai.generateText.doGenerate',
           'vercel.ai.operationId': 'ai.generateText.doGenerate',
           'vercel.ai.model.provider': 'mock-provider',
-          'vercel.ai.model.id': 'mock-model-id',
+          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.settings.maxRetries': 2,
           'gen_ai.system': 'mock-provider',
-          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.pipeline.name': 'generateText.doGenerate',
           'vercel.ai.streaming': false,
           'vercel.ai.response.finishReason': 'stop',
@@ -66,7 +65,7 @@ describe('Vercel AI integration (V5)', () => {
       // Third span - explicit telemetry enabled, should record inputs/outputs regardless of sendDefaultPii
       expect.objectContaining({
         data: {
-          'vercel.ai.model.id': 'mock-model-id',
+          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.model.provider': 'mock-provider',
           'vercel.ai.operationId': 'ai.generateText',
           'vercel.ai.pipeline.name': 'generateText',
@@ -76,6 +75,7 @@ describe('Vercel AI integration (V5)', () => {
           'vercel.ai.settings.maxRetries': 2,
           'vercel.ai.streaming': false,
           'gen_ai.prompt': '{"prompt":"Where is the second span?"}',
+          'gen_ai.request.messages': '[{"role":"user","content":"Where is the second span?"}]',
           'gen_ai.response.model': 'mock-model-id',
           'gen_ai.usage.input_tokens': 10,
           'gen_ai.usage.output_tokens': 20,
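The "explicit telemetry" case the third span asserts corresponds to passing the AI SDK's `experimental_telemetry` option, which records inputs and outputs even when `sendDefaultPii` is `false`. Roughly:

```ts
// Explicitly enabling telemetry (with input/output recording) overrides
// the sendDefaultPii-derived defaults, producing gen_ai.prompt and
// gen_ai.request.messages on the span.
await generateText({
  model,
  prompt: 'Where is the second span?',
  experimental_telemetry: { isEnabled: true, recordInputs: true, recordOutputs: true },
});
```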
@@ -97,10 +97,9 @@ describe('Vercel AI integration (V5)', () => {
           'operation.name': 'ai.generateText.doGenerate',
           'vercel.ai.operationId': 'ai.generateText.doGenerate',
           'vercel.ai.model.provider': 'mock-provider',
-          'vercel.ai.model.id': 'mock-model-id',
+          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.settings.maxRetries': 2,
           'gen_ai.system': 'mock-provider',
-          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.pipeline.name': 'generateText.doGenerate',
           'vercel.ai.streaming': false,
           'vercel.ai.response.finishReason': 'stop',
@@ -124,7 +123,7 @@ describe('Vercel AI integration (V5)', () => {
       // Fifth span - tool call generateText span
       expect.objectContaining({
         data: {
-          'vercel.ai.model.id': 'mock-model-id',
+          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.model.provider': 'mock-provider',
           'vercel.ai.operationId': 'ai.generateText',
           'vercel.ai.pipeline.name': 'generateText',
@@ -147,7 +146,7 @@ describe('Vercel AI integration (V5)', () => {
       // Sixth span - tool call doGenerate span
       expect.objectContaining({
         data: {
-          'vercel.ai.model.id': 'mock-model-id',
+          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.model.provider': 'mock-provider',
           'vercel.ai.operationId': 'ai.generateText.doGenerate',
           'vercel.ai.pipeline.name': 'generateText.doGenerate',
@@ -157,7 +156,6 @@ describe('Vercel AI integration (V5)', () => {
           'vercel.ai.response.timestamp': expect.any(String),
           'vercel.ai.settings.maxRetries': 2,
           'vercel.ai.streaming': false,
-          'gen_ai.request.model': 'mock-model-id',
           'gen_ai.response.finish_reasons': ['tool-calls'],
           'gen_ai.response.id': expect.any(String),
           'gen_ai.response.model': 'mock-model-id',
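The tool-call spans (fifth and sixth) would come from a `generateText` call that registers a tool, so the model finishes with `'tool-calls'` and the integration records `gen_ai.response.tool_calls` instead of `gen_ai.response.text`. A sketch, assuming AI SDK v5's `tool()` helper with its `inputSchema` naming; `toolCallModel` is a hypothetical stand-in for a mock whose `doGenerate` returns a tool-call content part:

```ts
import { generateText, tool } from 'ai';
import { z } from 'zod';

await generateText({
  model: toolCallModel, // hypothetical: mock model that answers with a tool call
  prompt: 'What is the weather in San Francisco?',
  tools: {
    getWeather: tool({
      description: 'Get the weather for a city',
      inputSchema: z.object({ city: z.string() }),
      execute: async ({ city }) => `Sunny in ${city}`,
    }),
  },
});
```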
@@ -202,11 +200,12 @@ describe('Vercel AI integration (V5)', () => {
       // First span - no telemetry config, should enable telemetry AND record inputs/outputs when sendDefaultPii: true
       expect.objectContaining({
         data: {
-          'vercel.ai.model.id': 'mock-model-id',
+          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.model.provider': 'mock-provider',
           'vercel.ai.operationId': 'ai.generateText',
           'vercel.ai.pipeline.name': 'generateText',
           'vercel.ai.prompt': '{"prompt":"Where is the first span?"}',
+          'gen_ai.request.messages': '[{"role":"user","content":"Where is the first span?"}]',
           'vercel.ai.response.finishReason': 'stop',
           'gen_ai.response.text': 'First span here!',
           'vercel.ai.settings.maxRetries': 2,
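The `sendDefaultPii: true` behavior asserted here is driven by SDK configuration rather than per-call options. A minimal sketch, assuming `@sentry/node`'s `vercelAIIntegration`:

```ts
import * as Sentry from '@sentry/node';

// With sendDefaultPii: true, input/output recording defaults on, so
// gen_ai.request.messages and gen_ai.response.text appear on spans even
// without an explicit experimental_telemetry config on the call.
Sentry.init({
  dsn: '...', // placeholder
  tracesSampleRate: 1.0,
  sendDefaultPii: true,
  integrations: [Sentry.vercelAIIntegration()],
});
```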
@@ -228,7 +227,7 @@ describe('Vercel AI integration (V5)', () => {
       // Second span - doGenerate for first call, should also include input/output fields when sendDefaultPii: true
       expect.objectContaining({
         data: {
-          'vercel.ai.model.id': 'mock-model-id',
+          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.model.provider': 'mock-provider',
           'vercel.ai.operationId': 'ai.generateText.doGenerate',
           'vercel.ai.pipeline.name': 'generateText.doGenerate',
@@ -240,7 +239,6 @@ describe('Vercel AI integration (V5)', () => {
           'vercel.ai.response.timestamp': expect.any(String),
           'vercel.ai.settings.maxRetries': 2,
           'vercel.ai.streaming': false,
-          'gen_ai.request.model': 'mock-model-id',
           'gen_ai.response.finish_reasons': ['stop'],
           'gen_ai.response.id': expect.any(String),
           'gen_ai.response.model': 'mock-model-id',
@@ -260,11 +258,12 @@ describe('Vercel AI integration (V5)', () => {
       // Third span - explicitly enabled telemetry, should record inputs/outputs regardless of sendDefaultPii
       expect.objectContaining({
         data: {
-          'vercel.ai.model.id': 'mock-model-id',
+          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.model.provider': 'mock-provider',
           'vercel.ai.operationId': 'ai.generateText',
           'vercel.ai.pipeline.name': 'generateText',
           'vercel.ai.prompt': '{"prompt":"Where is the second span?"}',
+          'gen_ai.request.messages': '[{"role":"user","content":"Where is the second span?"}]',
           'vercel.ai.response.finishReason': 'stop',
           'gen_ai.response.text': expect.any(String),
           'vercel.ai.settings.maxRetries': 2,
@@ -291,10 +290,9 @@ describe('Vercel AI integration (V5)', () => {
           'operation.name': 'ai.generateText.doGenerate',
           'vercel.ai.operationId': 'ai.generateText.doGenerate',
           'vercel.ai.model.provider': 'mock-provider',
-          'vercel.ai.model.id': 'mock-model-id',
+          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.settings.maxRetries': 2,
           'gen_ai.system': 'mock-provider',
-          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.pipeline.name': 'generateText.doGenerate',
           'vercel.ai.streaming': false,
           'vercel.ai.response.finishReason': 'stop',
@@ -318,13 +316,13 @@ describe('Vercel AI integration (V5)', () => {
       // Fifth span - tool call generateText span (should include prompts when sendDefaultPii: true)
       expect.objectContaining({
         data: {
-          'vercel.ai.model.id': 'mock-model-id',
+          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.model.provider': 'mock-provider',
           'vercel.ai.operationId': 'ai.generateText',
           'vercel.ai.pipeline.name': 'generateText',
           'vercel.ai.prompt': '{"prompt":"What is the weather in San Francisco?"}',
+          'gen_ai.request.messages': '[{"role":"user","content":"What is the weather in San Francisco?"}]',
           'vercel.ai.response.finishReason': 'tool-calls',
-          // 'gen_ai.response.text': 'Tool call completed!',
           'gen_ai.response.tool_calls': expect.any(String),
           'vercel.ai.settings.maxRetries': 2,
           'vercel.ai.streaming': false,
@@ -345,7 +343,7 @@ describe('Vercel AI integration (V5)', () => {
       // Sixth span - tool call doGenerate span (should include prompts when sendDefaultPii: true)
       expect.objectContaining({
         data: {
-          'vercel.ai.model.id': 'mock-model-id',
+          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.model.provider': 'mock-provider',
           'vercel.ai.operationId': 'ai.generateText.doGenerate',
           'vercel.ai.pipeline.name': 'generateText.doGenerate',
@@ -360,7 +358,6 @@ describe('Vercel AI integration (V5)', () => {
           'gen_ai.response.tool_calls': expect.any(String),
           'vercel.ai.settings.maxRetries': 2,
           'vercel.ai.streaming': false,
-          'gen_ai.request.model': 'mock-model-id',
           'gen_ai.response.finish_reasons': ['tool-calls'],
           'gen_ai.response.id': expect.any(String),
           'gen_ai.response.model': 'mock-model-id',
@@ -441,7 +438,7 @@ describe('Vercel AI integration (V5)', () => {
     spans: expect.arrayContaining([
       expect.objectContaining({
         data: {
-          'vercel.ai.model.id': 'mock-model-id',
+          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.model.provider': 'mock-provider',
           'vercel.ai.operationId': 'ai.generateText',
           'vercel.ai.pipeline.name': 'generateText',
@@ -462,7 +459,7 @@ describe('Vercel AI integration (V5)', () => {
       }),
       expect.objectContaining({
         data: {
-          'vercel.ai.model.id': 'mock-model-id',
+          'gen_ai.request.model': 'mock-model-id',
           'vercel.ai.model.provider': 'mock-provider',
           'vercel.ai.operationId': 'ai.generateText.doGenerate',
           'vercel.ai.pipeline.name': 'generateText.doGenerate',
@@ -472,7 +469,6 @@ describe('Vercel AI integration (V5)', () => {
           'vercel.ai.response.timestamp': expect.any(String),
           'vercel.ai.settings.maxRetries': 2,
           'vercel.ai.streaming': false,
-          'gen_ai.request.model': 'mock-model-id',
           'gen_ai.response.finish_reasons': ['tool-calls'],
           'gen_ai.response.id': expect.any(String),
           'gen_ai.response.model': 'mock-model-id',
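The common thread across every hunk: the integration now emits the OpenTelemetry GenAI semantic-convention key `gen_ai.request.model` exactly once, replacing the SDK-specific `vercel.ai.model.id` and dropping the duplicate `gen_ai.request.model` entry on `doGenerate` spans. As an illustrative sketch only (a hypothetical helper, not the integration's actual implementation), the rename amounts to:

```ts
// Map the SDK-specific attribute onto the OTel GenAI semconv key,
// leaving all other span attributes untouched.
function normalizeVercelAiAttributes(attrs: Record<string, unknown>): Record<string, unknown> {
  const { 'vercel.ai.model.id': modelId, ...rest } = attrs;
  return modelId === undefined ? rest : { ...rest, 'gen_ai.request.model': modelId };
}
```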