From 6406fa1b210590754069b08b6c74acb7cda77ae6 Mon Sep 17 00:00:00 2001 From: Nicolas Hrubec Date: Thu, 27 Nov 2025 12:08:45 +0100 Subject: [PATCH 1/4] feat(browser): Expose langchain instrumentation --- .../tracing/ai-providers/langchain/init.js | 9 +++ .../tracing/ai-providers/langchain/mocks.js | 76 +++++++++++++++++++ .../tracing/ai-providers/langchain/subject.js | 21 +++++ .../tracing/ai-providers/langchain/test.ts | 37 +++++++++ .../utils/generatePlugin.ts | 1 + packages/browser/rollup.bundle.config.mjs | 1 + packages/browser/src/index.ts | 1 + .../index.createlangchaincallbackhandler.ts | 1 + .../browser/src/utils/lazyLoadIntegration.ts | 1 + 9 files changed, 148 insertions(+) create mode 100644 dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/init.js create mode 100644 dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js create mode 100644 dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/subject.js create mode 100644 dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/test.ts create mode 100644 packages/browser/src/integrations-bundle/index.createlangchaincallbackhandler.ts diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/init.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/init.js new file mode 100644 index 000000000000..d90a3acf6157 --- /dev/null +++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/init.js @@ -0,0 +1,9 @@ +import * as Sentry from '@sentry/browser'; + +window.Sentry = Sentry; + +Sentry.init({ + dsn: 'https://public@dsn.ingest.sentry.io/1337', + tracesSampleRate: 1, + debug: true, +}); diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js new file mode 100644 index 000000000000..46e6b33b60dd --- /dev/null +++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js @@ -0,0 +1,76 @@ +// Mock LangChain Chat Model for browser testing +export class MockChatModel { + constructor(params) { + this._model = params.model; + this._temperature = params.temperature; + this._maxTokens = params.maxTokens; + } + + async invoke(messages, options) { + const callbacks = options?.callbacks || []; + const runId = "mock-run-id-123"; + + const invocationParams = { + model: this._model, + temperature: this._temperature, + max_tokens: this._maxTokens, + }; + + const serialized = { + lc: 1, + type: 'constructor', + id: ['langchain', 'anthropic', 'anthropic'], + kwargs: invocationParams, + }; + + // Call handleChatModelStart + for (const callback of callbacks) { + if (callback.handleChatModelStart) { + await callback.handleChatModelStart( + serialized, + messages, + runId, + undefined, + undefined, + { invocation_params: invocationParams }, + { ls_model_name: this._model, ls_provider: 'anthropic' }, + ); + } + } + + // Simulate processing time + await new Promise(resolve => setTimeout(resolve, 10)); + + // Create mock result + const result = { + generations: [ + [ + { + text: 'Mock response from LangChain!', + generationInfo: { + finish_reason: 'stop', + }, + }, + ], + ], + llmOutput: { + tokenUsage: { + promptTokens: 10, + completionTokens: 15, + totalTokens: 25, + }, + model_name: this._model, + id: 'msg_mock123', + }, + }; + + // Call handleLLMEnd + for (const callback of callbacks) { + if (callback.handleLLMEnd) 
{ + await callback.handleLLMEnd(result, runId); + } + } + + return result; + } +} diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/subject.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/subject.js new file mode 100644 index 000000000000..aab1d2c7ede1 --- /dev/null +++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/subject.js @@ -0,0 +1,21 @@ +import { createLangChainCallbackHandler } from '@sentry/browser'; +import { MockChatModel } from './mocks.js'; + +const callbackHandler = createLangChainCallbackHandler({ + recordInputs: false, + recordOutputs: false, +}); + +const chatModel = new MockChatModel({ + model: 'claude-3-haiku-20240307', + temperature: 0.7, + maxTokens: 100, +}); + +// Test that manual instrumentation doesn't crash the browser +// The instrumentation automatically creates spans +const response = await chatModel.invoke('What is the capital of France?', { + callbacks: [callbackHandler], +}); + +console.log('Received response', response); diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/test.ts b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/test.ts new file mode 100644 index 000000000000..9cc1cc9ff98b --- /dev/null +++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/test.ts @@ -0,0 +1,37 @@ +import { expect } from '@playwright/test'; +import { sentryTest } from '../../../../utils/fixtures'; +import { envelopeRequestParser, waitForTransactionRequest } from '../../../../utils/helpers'; + +// These tests are not exhaustive because the instrumentation is +// already tested in the node integration tests and we merely +// want to test that the instrumentation does not crash in the browser +// and that gen_ai transactions are sent. 
+ +sentryTest('manual LangChain instrumentation sends gen_ai transactions', async ({ getLocalTestUrl, page }) => { + const transactionPromise = waitForTransactionRequest(page, event => { + return !!event.transaction?.includes('claude-3-haiku-20240307'); + }); + + const url = await getLocalTestUrl({ testDir: __dirname }); + await page.goto(url); + + const req = await transactionPromise; + + const eventData = envelopeRequestParser(req); + + // Verify it's a gen_ai transaction + expect(eventData.transaction).toBe('chat claude-3-haiku-20240307'); + expect(eventData.contexts?.trace?.op).toBe('gen_ai.chat'); + expect(eventData.contexts?.trace?.origin).toBe('auto.ai.langchain'); + expect(eventData.contexts?.trace?.data).toMatchObject({ + 'gen_ai.operation.name': 'chat', + 'gen_ai.system': 'anthropic', + 'gen_ai.request.model': 'claude-3-haiku-20240307', + 'gen_ai.request.temperature': 0.7, + 'gen_ai.response.model': 'claude-3-haiku-20240307', + 'gen_ai.response.id': 'msg_mock123', + 'gen_ai.usage.input_tokens': 10, + 'gen_ai.usage.output_tokens': 15, + 'gen_ai.usage.total_tokens': 25, + }); +}); diff --git a/dev-packages/browser-integration-tests/utils/generatePlugin.ts b/dev-packages/browser-integration-tests/utils/generatePlugin.ts index 6e3ef99aa7ea..935fb053a57d 100644 --- a/dev-packages/browser-integration-tests/utils/generatePlugin.ts +++ b/dev-packages/browser-integration-tests/utils/generatePlugin.ts @@ -40,6 +40,7 @@ const IMPORTED_INTEGRATION_CDN_BUNDLE_PATHS: Record = { instrumentAnthropicAiClient: 'instrumentanthropicaiclient', instrumentOpenAiClient: 'instrumentopenaiclient', instrumentGoogleGenAIClient: 'instrumentgooglegenaiclient', + createLangChainCallbackHandler: 'createlangchaincallbackhandler', // technically, this is not an integration, but let's add it anyway for simplicity makeMultiplexedTransport: 'multiplexedtransport', }; diff --git a/packages/browser/rollup.bundle.config.mjs b/packages/browser/rollup.bundle.config.mjs index 4893e66f49ef..db39e9eb70d2 100644 --- a/packages/browser/rollup.bundle.config.mjs +++ b/packages/browser/rollup.bundle.config.mjs @@ -16,6 +16,7 @@ const reexportedPluggableIntegrationFiles = [ 'instrumentanthropicaiclient', 'instrumentopenaiclient', 'instrumentgooglegenaiclient', + 'createlangchaincallbackhandler', ]; browserPluggableIntegrationFiles.forEach(integrationName => { diff --git a/packages/browser/src/index.ts b/packages/browser/src/index.ts index 03416fa41af7..be1b611b5b75 100644 --- a/packages/browser/src/index.ts +++ b/packages/browser/src/index.ts @@ -66,6 +66,7 @@ export { instrumentAnthropicAiClient, instrumentOpenAiClient, instrumentGoogleGenAIClient, + createLangChainCallbackHandler, logger, } from '@sentry/core'; export type { Span, FeatureFlagsIntegration } from '@sentry/core'; diff --git a/packages/browser/src/integrations-bundle/index.createlangchaincallbackhandler.ts b/packages/browser/src/integrations-bundle/index.createlangchaincallbackhandler.ts new file mode 100644 index 000000000000..8a64eda45579 --- /dev/null +++ b/packages/browser/src/integrations-bundle/index.createlangchaincallbackhandler.ts @@ -0,0 +1 @@ +export { createLangChainCallbackHandler } from '@sentry/core'; diff --git a/packages/browser/src/utils/lazyLoadIntegration.ts b/packages/browser/src/utils/lazyLoadIntegration.ts index 6d5e48542f56..fd30a8a4ff46 100644 --- a/packages/browser/src/utils/lazyLoadIntegration.ts +++ b/packages/browser/src/utils/lazyLoadIntegration.ts @@ -24,6 +24,7 @@ const LazyLoadableIntegrations = { instrumentAnthropicAiClient: 
'instrumentanthropicaiclient', instrumentOpenAiClient: 'instrumentopenaiclient', instrumentGoogleGenAIClient: 'instrumentgooglegenaiclient', + createLangChainCallbackHandler: 'createlangchaincallbackhandler', } as const; const WindowWithMaybeIntegration = WINDOW as { From cf5e56cd4b684aeb35aa04654e2fb83596f2ae93 Mon Sep 17 00:00:00 2001 From: Nicolas Hrubec Date: Thu, 27 Nov 2025 13:07:25 +0100 Subject: [PATCH 2/4] . --- .../suites/tracing/ai-providers/langchain/mocks.js | 4 ++-- .../suites/tracing/ai-providers/langchain/subject.js | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js index 46e6b33b60dd..8b1a0e900c9a 100644 --- a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js +++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js @@ -1,5 +1,5 @@ // Mock LangChain Chat Model for browser testing -export class MockChatModel { +export class MockChatAnthropic { constructor(params) { this._model = params.model; this._temperature = params.temperature; @@ -46,7 +46,7 @@ export class MockChatModel { generations: [ [ { - text: 'Mock response from LangChain!', + text: 'Mock response from Anthropic!', generationInfo: { finish_reason: 'stop', }, diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/subject.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/subject.js index aab1d2c7ede1..af3000985c59 100644 --- a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/subject.js +++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/subject.js @@ -1,12 +1,12 @@ import { createLangChainCallbackHandler } from '@sentry/browser'; -import { MockChatModel } from './mocks.js'; +import { MockChatAnthropic } from './mocks.js'; const callbackHandler = createLangChainCallbackHandler({ recordInputs: false, recordOutputs: false, }); -const chatModel = new MockChatModel({ +const chatModel = new MockChatAnthropic({ model: 'claude-3-haiku-20240307', temperature: 0.7, maxTokens: 100, From 332c8e3f23412ac741a6650c0bb8a717c24400c2 Mon Sep 17 00:00:00 2001 From: Nicolas Hrubec Date: Thu, 27 Nov 2025 13:08:41 +0100 Subject: [PATCH 3/4] format --- .../suites/tracing/ai-providers/langchain/mocks.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js index 8b1a0e900c9a..a39cf0d81669 100644 --- a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js +++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js @@ -8,7 +8,7 @@ export class MockChatAnthropic { async invoke(messages, options) { const callbacks = options?.callbacks || []; - const runId = "mock-run-id-123"; + const runId = 'mock-run-id-123'; const invocationParams = { model: this._model, From 9547c6178c3b258bdd90ddde88caa99406cc6bba Mon Sep 17 00:00:00 2001 From: Nicolas Hrubec Date: Thu, 27 Nov 2025 13:27:31 +0100 Subject: [PATCH 4/4] . 
---
 .../suites/tracing/ai-providers/langchain/mocks.js   | 4 ++--
 .../suites/tracing/ai-providers/langchain/subject.js | 1 +
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js
index a39cf0d81669..e7d4dbf00961 100644
--- a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js
@@ -6,8 +6,8 @@ export class MockChatAnthropic {
     this._maxTokens = params.maxTokens;
   }
 
-  async invoke(messages, options) {
-    const callbacks = options?.callbacks || [];
+  async invoke(messages, config = { callbacks: [] }) {
+    const callbacks = config.callbacks;
     const runId = 'mock-run-id-123';
 
     const invocationParams = {
diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/subject.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/subject.js
index af3000985c59..3df04acd505b 100644
--- a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/subject.js
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/subject.js
@@ -14,6 +14,7 @@ const chatModel = new MockChatAnthropic({
 
 // Test that manual instrumentation doesn't crash the browser
 // The instrumentation automatically creates spans
+// We can provide callbacks in the config object: https://docs.langchain.com/oss/python/langchain/models#invocation-config
 const response = await chatModel.invoke('What is the capital of France?', {
   callbacks: [callbackHandler],
 });
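
For reference, a minimal sketch of how an application would wire the newly exposed createLangChainCallbackHandler into a real LangChain chat model in the browser. The ChatAnthropic import from @langchain/anthropic and its constructor options are assumptions here (provider auth and client options are omitted); any LangChain runnable that accepts callbacks in its invocation config should be picked up the same way, as exercised by the MockChatAnthropic subject above.

import * as Sentry from '@sentry/browser';
// Assumed provider package; any LangChain chat model that accepts callbacks works the same way.
import { ChatAnthropic } from '@langchain/anthropic';

Sentry.init({
  dsn: '__PUBLIC_DSN__',
  tracesSampleRate: 1,
});

// recordInputs/recordOutputs control whether prompt and completion content
// is attached to the resulting gen_ai span.
const sentryCallback = Sentry.createLangChainCallbackHandler({
  recordInputs: false,
  recordOutputs: false,
});

// Provider auth/client options omitted for brevity.
const chatModel = new ChatAnthropic({
  model: 'claude-3-haiku-20240307',
  temperature: 0.7,
  maxTokens: 100,
});

// Passing the handler in the invocation config produces a `chat claude-3-haiku-20240307`
// transaction with op `gen_ai.chat` and origin `auto.ai.langchain`, as asserted in test.ts.
const response = await chatModel.invoke('What is the capital of France?', {
  callbacks: [sentryCallback],
});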