Skip to content

Commit eb68374

Browse files
committed
fixup! fix(tracing): Add missing attributes in vercel-ai spans (#18333)
1 parent: abce6f5 · commit: eb68374

File tree

2 files changed

+28
-21
lines changed

2 files changed

+28
-21
lines changed

packages/core/src/tracing/vercel-ai/index.ts

Lines changed: 5 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -4,10 +4,10 @@ import type { Event } from '../../types-hoist/event';
44
import type { Span, SpanAttributes, SpanAttributeValue, SpanJSON, SpanOrigin } from '../../types-hoist/span';
55
import { spanToJSON } from '../../utils/spanUtils';
66
import {
7+
GEN_AI_REQUEST_MESSAGES_ATTRIBUTE,
78
GEN_AI_USAGE_INPUT_TOKENS_CACHE_WRITE_ATTRIBUTE,
89
GEN_AI_USAGE_INPUT_TOKENS_CACHED_ATTRIBUTE,
910
} from '../ai/gen-ai-attributes';
10-
import { getTruncatedJsonString } from '../ai/utils';
1111
import { toolCallSpanMap } from './constants';
1212
import type { TokenSummary } from './types';
1313
import {
@@ -20,7 +20,6 @@ import type { ProviderMetadata } from './vercel-ai-attributes';
2020
import {
2121
AI_MODEL_ID_ATTRIBUTE,
2222
AI_MODEL_PROVIDER_ATTRIBUTE,
23-
AI_PROMPT_ATTRIBUTE,
2423
AI_PROMPT_MESSAGES_ATTRIBUTE,
2524
AI_PROMPT_TOOLS_ATTRIBUTE,
2625
AI_RESPONSE_OBJECT_ATTRIBUTE,
@@ -36,6 +35,7 @@ import {
3635
AI_USAGE_CACHED_INPUT_TOKENS_ATTRIBUTE,
3736
AI_USAGE_COMPLETION_TOKENS_ATTRIBUTE,
3837
AI_USAGE_PROMPT_TOKENS_ATTRIBUTE,
38+
GEN_AI_REQUEST_MODEL_ATTRIBUTE,
3939
GEN_AI_RESPONSE_MODEL_ATTRIBUTE,
4040
GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE,
4141
GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE,
@@ -136,7 +136,7 @@ function processEndedVercelAiSpan(span: SpanJSON): void {
136136
}
137137

138138
// Rename AI SDK attributes to standardized gen_ai attributes
139-
renameAttributeKey(attributes, AI_PROMPT_MESSAGES_ATTRIBUTE, 'gen_ai.request.messages');
139+
renameAttributeKey(attributes, AI_PROMPT_MESSAGES_ATTRIBUTE, GEN_AI_REQUEST_MESSAGES_ATTRIBUTE);
140140
renameAttributeKey(attributes, AI_RESPONSE_TEXT_ATTRIBUTE, 'gen_ai.response.text');
141141
renameAttributeKey(attributes, AI_RESPONSE_TOOL_CALLS_ATTRIBUTE, 'gen_ai.response.tool_calls');
142142
renameAttributeKey(attributes, AI_RESPONSE_OBJECT_ATTRIBUTE, 'gen_ai.response.object');
@@ -146,7 +146,7 @@ function processEndedVercelAiSpan(span: SpanJSON): void {
146146
renameAttributeKey(attributes, AI_TOOL_CALL_RESULT_ATTRIBUTE, 'gen_ai.tool.output');
147147

148148
renameAttributeKey(attributes, AI_SCHEMA_ATTRIBUTE, 'gen_ai.request.schema');
149-
renameAttributeKey(attributes, AI_MODEL_ID_ATTRIBUTE, 'gen_ai.request.model');
149+
renameAttributeKey(attributes, AI_MODEL_ID_ATTRIBUTE, GEN_AI_REQUEST_MODEL_ATTRIBUTE);
150150

151151
addProviderMetadataToAttributes(attributes);
152152

@@ -209,14 +209,8 @@ function processGenerateSpan(span: Span, name: string, attributes: SpanAttribute
209209
span.setAttribute('gen_ai.function_id', functionId);
210210
}
211211

212-
if (attributes[AI_PROMPT_ATTRIBUTE]) {
213-
const truncatedPrompt = getTruncatedJsonString(attributes[AI_PROMPT_ATTRIBUTE] as string | string[]);
214-
span.setAttribute('gen_ai.prompt', truncatedPrompt);
212+
requestMessagesFromPrompt(span, attributes);
215213

216-
if (!attributes['gen_ai.request.messages'] && !attributes[AI_PROMPT_MESSAGES_ATTRIBUTE]) {
217-
requestMessagesFromPrompt(span, attributes[AI_PROMPT_ATTRIBUTE]);
218-
}
219-
}
220214
if (attributes[AI_MODEL_ID_ATTRIBUTE] && !attributes[GEN_AI_RESPONSE_MODEL_ATTRIBUTE]) {
221215
span.setAttribute(GEN_AI_RESPONSE_MODEL_ATTRIBUTE, attributes[AI_MODEL_ID_ATTRIBUTE]);
222216
}

packages/core/src/tracing/vercel-ai/utils.ts

Lines changed: 23 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,14 @@
11
import type { TraceContext } from '../../types-hoist/context';
2-
import type { Span, SpanJSON } from '../../types-hoist/span';
3-
import { GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE, GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE } from '../ai/gen-ai-attributes';
2+
import type { Span, SpanAttributes, SpanJSON } from '../../types-hoist/span';
3+
import {
4+
GEN_AI_REQUEST_MESSAGES_ATTRIBUTE,
5+
GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE,
6+
GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE,
7+
} from '../ai/gen-ai-attributes';
48
import { getTruncatedJsonString } from '../ai/utils';
59
import { toolCallSpanMap } from './constants';
610
import type { TokenSummary } from './types';
11+
import { AI_PROMPT_ATTRIBUTE, AI_PROMPT_MESSAGES_ATTRIBUTE } from './vercel-ai-attributes';
712

813
/**
914
* Accumulates token data from a span to its parent in the token accumulator map.
@@ -92,7 +97,7 @@ export function convertAvailableToolsToJsonString(tools: unknown[]): string {
9297
/**
9398
* Convert the prompt string to messages array
9499
*/
95-
export function convertPromptToMessages(prompt: string): { role: string; content: string }[] | undefined {
100+
export function convertPromptToMessages(prompt: string): { role: string; content: string }[] {
96101
try {
97102
const p = JSON.parse(prompt);
98103
if (!!p && typeof p === 'object') {
@@ -105,22 +110,30 @@ export function convertPromptToMessages(prompt: string): { role: string; content
105110
if (typeof prompt === 'string') {
106111
messages.push({ role: 'user', content: prompt });
107112
}
108-
return messages.length ? messages : [];
113+
return messages;
109114
}
110115
}
111116
// eslint-disable-next-line no-empty
112117
} catch {}
113-
return undefined;
118+
return [];
114119
}
115120

116121
/**
117122
* Generate a request.messages JSON array from the prompt field in the
118123
* invoke_agent op
119124
*/
120-
export function requestMessagesFromPrompt(span: Span, prompt: unknown): void {
121-
if (typeof prompt !== 'string') return;
122-
const maybeMessages = convertPromptToMessages(prompt);
123-
if (maybeMessages !== undefined) {
124-
span.setAttribute('gen_ai.request.messages', getTruncatedJsonString(maybeMessages));
125+
export function requestMessagesFromPrompt(span: Span, attributes: SpanAttributes): void {
126+
if (attributes[AI_PROMPT_ATTRIBUTE]) {
127+
const truncatedPrompt = getTruncatedJsonString(attributes[AI_PROMPT_ATTRIBUTE] as string | string[]);
128+
span.setAttribute('gen_ai.prompt', truncatedPrompt);
129+
}
130+
const prompt = attributes[AI_PROMPT_ATTRIBUTE];
131+
if (
132+
typeof prompt === 'string' &&
133+
!attributes[GEN_AI_REQUEST_MESSAGES_ATTRIBUTE] &&
134+
!attributes[AI_PROMPT_MESSAGES_ATTRIBUTE]
135+
) {
136+
const messages = convertPromptToMessages(prompt);
137+
if (messages.length) span.setAttribute(GEN_AI_REQUEST_MESSAGES_ATTRIBUTE, getTruncatedJsonString(messages));
125138
}
126139
}

0 commit comments

Comments (0)