From d26be89c689e75e38a6646f1fe83d843aca0784a Mon Sep 17 00:00:00 2001 From: Nicolas Hrubec Date: Thu, 27 Nov 2025 16:57:04 +0100 Subject: [PATCH 1/2] feat(browser): Expose vercelai instrumentation --- .../tracing/ai-providers/vercel-ai/init.js | 9 +++ .../tracing/ai-providers/vercel-ai/mocks.js | 55 +++++++++++++++++++ .../tracing/ai-providers/vercel-ai/subject.js | 15 +++++ .../tracing/ai-providers/vercel-ai/test.ts | 29 ++++++++++ .../utils/generatePlugin.ts | 1 + packages/browser/rollup.bundle.config.mjs | 1 + packages/browser/src/index.ts | 1 + .../index.addvercelaiprocessors.ts | 1 + .../browser/src/utils/lazyLoadIntegration.ts | 1 + 9 files changed, 113 insertions(+) create mode 100644 dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/init.js create mode 100644 dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/mocks.js create mode 100644 dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/subject.js create mode 100644 dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/test.ts create mode 100644 packages/browser/src/integrations-bundle/index.addvercelaiprocessors.ts diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/init.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/init.js new file mode 100644 index 000000000000..d90a3acf6157 --- /dev/null +++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/init.js @@ -0,0 +1,9 @@ +import * as Sentry from '@sentry/browser'; + +window.Sentry = Sentry; + +Sentry.init({ + dsn: 'https://public@dsn.ingest.sentry.io/1337', + tracesSampleRate: 1, + debug: true, +}); diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/mocks.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/mocks.js new file mode 100644 index 000000000000..c80e22916e51 --- /dev/null +++ 
b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/mocks.js @@ -0,0 +1,55 @@ +class MockLanguageModelV1 { + constructor(config) { + this.doGenerate = config.doGenerate; + } +} + +export const mockModelBasic = new MockLanguageModelV1({ + doGenerate: async () => ({ + rawCall: { rawPrompt: null, rawSettings: {} }, + finishReason: 'stop', + usage: { promptTokens: 10, completionTokens: 20 }, + text: 'Mock response from model', + }), +}); + +// Mock implementation of generateText that uses the mock models +export async function mockGenerateText(options) { + const model = options.model; + + return await window.Sentry.startSpan( + { + name: 'ai.generateText', + attributes: { + 'ai.model.id': 'gpt-4-turbo', + 'ai.model.provider': 'openai', + }, + }, + async () => { + const result = await model.doGenerate(); + + return await window.Sentry.startSpan( + { + name: 'ai.generateText.doGenerate', + attributes: { + 'ai.model.id': 'gpt-4-turbo', + 'ai.model.provider': 'openai', + 'ai.prompt': options.prompt, + 'ai.response.text': result.text, + 'ai.usage.promptTokens': result.usage.promptTokens, + 'ai.usage.completionTokens': result.usage.completionTokens, + }, + }, + async () => { + // Simulate processing time + await new Promise(resolve => setTimeout(resolve, 10)); + + return { + text: result.text, + usage: result.usage, + }; + }, + ); + }, + ); +} diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/subject.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/subject.js new file mode 100644 index 000000000000..1c5584d4543d --- /dev/null +++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/subject.js @@ -0,0 +1,15 @@ +import { addVercelAiProcessors, getClient } from '@sentry/browser'; +import { mockGenerateText, mockModelBasic } from './mocks.js'; + +console.log('getting client'); +const client = getClient(); +console.log('adding processors'); 
+addVercelAiProcessors(client); + +console.log('running generateText with mock model'); +const result = await mockGenerateText({ + model: mockModelBasic, + prompt: 'Test prompt', +}); + +console.log('Generated text result:', result); diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/test.ts b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/test.ts new file mode 100644 index 000000000000..db0d8facc577 --- /dev/null +++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/test.ts @@ -0,0 +1,29 @@ +import { expect } from '@playwright/test'; +import { sentryTest } from '../../../../utils/fixtures'; +import { envelopeRequestParser, waitForTransactionRequest } from '../../../../utils/helpers'; + +// These tests are not exhaustive because the instrumentation is +// already tested in the node integration tests and we merely +// want to test that the instrumentation does not crash in the browser +// and that gen_ai transactions are sent. 
+ +sentryTest('manual Vercel AI instrumentation sends gen_ai transactions', async ({ getLocalTestUrl, page }) => { + const transactionPromise = waitForTransactionRequest(page, event => { + return !!event.transaction?.includes('generateText'); + }); + + const url = await getLocalTestUrl({ testDir: __dirname }); + await page.goto(url); + + const req = await transactionPromise; + + const eventData = envelopeRequestParser(req); + + // Verify it's a gen_ai transaction + expect(eventData.transaction).toBe('generateText'); + expect(eventData.contexts?.trace?.op).toBe('gen_ai.invoke_agent'); + expect(eventData.contexts?.trace?.origin).toBe('auto.vercelai.otel'); + expect(eventData.contexts?.trace?.data).toMatchObject({ + 'gen_ai.response.model': 'gpt-4-turbo', + }); +}); diff --git a/dev-packages/browser-integration-tests/utils/generatePlugin.ts b/dev-packages/browser-integration-tests/utils/generatePlugin.ts index 548dfb2a6150..6de5c7707197 100644 --- a/dev-packages/browser-integration-tests/utils/generatePlugin.ts +++ b/dev-packages/browser-integration-tests/utils/generatePlugin.ts @@ -41,6 +41,7 @@ const IMPORTED_INTEGRATION_CDN_BUNDLE_PATHS: Record = { instrumentOpenAiClient: 'instrumentopenaiclient', instrumentGoogleGenAIClient: 'instrumentgooglegenaiclient', instrumentLangGraph: 'instrumentlanggraph', + addVercelAiProcessors: 'addvercelaiprocessors', // technically, this is not an integration, but let's add it anyway for simplicity makeMultiplexedTransport: 'multiplexedtransport', }; diff --git a/packages/browser/rollup.bundle.config.mjs b/packages/browser/rollup.bundle.config.mjs index 13bdee685821..d98100a2ad0a 100644 --- a/packages/browser/rollup.bundle.config.mjs +++ b/packages/browser/rollup.bundle.config.mjs @@ -17,6 +17,7 @@ const reexportedPluggableIntegrationFiles = [ 'instrumentopenaiclient', 'instrumentgooglegenaiclient', 'instrumentlanggraph', + 'addvercelaiprocessors', ]; browserPluggableIntegrationFiles.forEach(integrationName => { diff --git 
a/packages/browser/src/index.ts b/packages/browser/src/index.ts index a58714f312d7..9e2fd955d770 100644 --- a/packages/browser/src/index.ts +++ b/packages/browser/src/index.ts @@ -67,6 +67,7 @@ export { instrumentOpenAiClient, instrumentGoogleGenAIClient, instrumentLangGraph, + addVercelAiProcessors, logger, } from '@sentry/core'; export type { Span, FeatureFlagsIntegration } from '@sentry/core'; diff --git a/packages/browser/src/integrations-bundle/index.addvercelaiprocessors.ts b/packages/browser/src/integrations-bundle/index.addvercelaiprocessors.ts new file mode 100644 index 000000000000..c44e46fa788b --- /dev/null +++ b/packages/browser/src/integrations-bundle/index.addvercelaiprocessors.ts @@ -0,0 +1 @@ +export { addVercelAiProcessors } from '@sentry/core'; diff --git a/packages/browser/src/utils/lazyLoadIntegration.ts b/packages/browser/src/utils/lazyLoadIntegration.ts index 8eeb6b95b66b..7e7f6a8fbb18 100644 --- a/packages/browser/src/utils/lazyLoadIntegration.ts +++ b/packages/browser/src/utils/lazyLoadIntegration.ts @@ -25,6 +25,7 @@ const LazyLoadableIntegrations = { instrumentOpenAiClient: 'instrumentopenaiclient', instrumentGoogleGenAIClient: 'instrumentgooglegenaiclient', instrumentLangGraph: 'instrumentlanggraph', + addVercelAiProcessors: 'addvercelaiprocessors', } as const; const WindowWithMaybeIntegration = WINDOW as { From 244f9cbae6c40377a784d489bec0942afe5c5f02 Mon Sep 17 00:00:00 2001 From: Nicolas Hrubec Date: Thu, 27 Nov 2025 16:57:43 +0100 Subject: [PATCH 2/2] test(browser): Remove debug console.log statements from vercel-ai test subject 
--- .../suites/tracing/ai-providers/vercel-ai/subject.js | 3 --- 1 file changed, 3 deletions(-) diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/subject.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/subject.js index 1c5584d4543d..4419b6cd95a0 100644 --- a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/subject.js +++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/vercel-ai/subject.js @@ -1,12 +1,9 @@ import { addVercelAiProcessors, getClient } from '@sentry/browser'; import { mockGenerateText, mockModelBasic } from './mocks.js'; -console.log('getting client'); const client = getClient(); -console.log('adding processors'); addVercelAiProcessors(client); -console.log('running generateText with mock model'); const result = await mockGenerateText({ model: mockModelBasic, prompt: 'Test prompt',