diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/init.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/init.js
new file mode 100644
index 000000000000..d90a3acf6157
--- /dev/null
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/init.js
@@ -0,0 +1,9 @@
+import * as Sentry from '@sentry/browser';
+
+window.Sentry = Sentry;
+
+Sentry.init({
+  dsn: 'https://public@dsn.ingest.sentry.io/1337',
+  tracesSampleRate: 1,
+  debug: true,
+});
diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js
new file mode 100644
index 000000000000..e7d4dbf00961
--- /dev/null
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/mocks.js
@@ -0,0 +1,76 @@
+// Mock LangChain Chat Model for browser testing
+export class MockChatAnthropic {
+  constructor(params) {
+    this._model = params.model;
+    this._temperature = params.temperature;
+    this._maxTokens = params.maxTokens;
+  }
+
+  async invoke(messages, config = { callbacks: [] }) {
+    const callbacks = config.callbacks;
+    const runId = 'mock-run-id-123';
+
+    const invocationParams = {
+      model: this._model,
+      temperature: this._temperature,
+      max_tokens: this._maxTokens,
+    };
+
+    const serialized = {
+      lc: 1,
+      type: 'constructor',
+      id: ['langchain', 'anthropic', 'anthropic'],
+      kwargs: invocationParams,
+    };
+
+    // Call handleChatModelStart
+    for (const callback of callbacks) {
+      if (callback.handleChatModelStart) {
+        await callback.handleChatModelStart(
+          serialized,
+          messages,
+          runId,
+          undefined,
+          undefined,
+          { invocation_params: invocationParams },
+          { ls_model_name: this._model, ls_provider: 'anthropic' },
+        );
+      }
+    }
+
+    // Simulate processing time
+    await new Promise(resolve => setTimeout(resolve, 10));
+
+    // Create mock result
+    const result = {
+      generations: [
+        [
+          {
+            text: 'Mock response from Anthropic!',
+            generationInfo: {
+              finish_reason: 'stop',
+            },
+          },
+        ],
+      ],
+      llmOutput: {
+        tokenUsage: {
+          promptTokens: 10,
+          completionTokens: 15,
+          totalTokens: 25,
+        },
+        model_name: this._model,
+        id: 'msg_mock123',
+      },
+    };
+
+    // Call handleLLMEnd
+    for (const callback of callbacks) {
+      if (callback.handleLLMEnd) {
+        await callback.handleLLMEnd(result, runId);
+      }
+    }
+
+    return result;
+  }
+}
diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/subject.js b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/subject.js
new file mode 100644
index 000000000000..3df04acd505b
--- /dev/null
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/subject.js
@@ -0,0 +1,22 @@
+import { createLangChainCallbackHandler } from '@sentry/browser';
+import { MockChatAnthropic } from './mocks.js';
+
+const callbackHandler = createLangChainCallbackHandler({
+  recordInputs: false,
+  recordOutputs: false,
+});
+
+const chatModel = new MockChatAnthropic({
+  model: 'claude-3-haiku-20240307',
+  temperature: 0.7,
+  maxTokens: 100,
+});
+
+// Test that manual instrumentation doesn't crash the browser.
+// The callback handler creates the spans automatically.
+// Callbacks can be provided in the invocation config object: https://docs.langchain.com/oss/python/langchain/models#invocation-config
+const response = await chatModel.invoke('What is the capital of France?', {
+  callbacks: [callbackHandler],
+});
+
+console.log('Received response', response);
diff --git a/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/test.ts b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/test.ts
new file mode 100644
index 000000000000..9cc1cc9ff98b
--- /dev/null
+++ b/dev-packages/browser-integration-tests/suites/tracing/ai-providers/langchain/test.ts
@@ -0,0 +1,37 @@
+import { expect } from '@playwright/test';
+import { sentryTest } from '../../../../utils/fixtures';
+import { envelopeRequestParser, waitForTransactionRequest } from '../../../../utils/helpers';
+
+// These tests are not exhaustive because the instrumentation is
+// already tested in the node integration tests and we merely
+// want to test that the instrumentation does not crash in the browser
+// and that gen_ai transactions are sent.
+
+sentryTest('manual LangChain instrumentation sends gen_ai transactions', async ({ getLocalTestUrl, page }) => {
+  const transactionPromise = waitForTransactionRequest(page, event => {
+    return !!event.transaction?.includes('claude-3-haiku-20240307');
+  });
+
+  const url = await getLocalTestUrl({ testDir: __dirname });
+  await page.goto(url);
+
+  const req = await transactionPromise;
+
+  const eventData = envelopeRequestParser(req);
+
+  // Verify it's a gen_ai transaction
+  expect(eventData.transaction).toBe('chat claude-3-haiku-20240307');
+  expect(eventData.contexts?.trace?.op).toBe('gen_ai.chat');
+  expect(eventData.contexts?.trace?.origin).toBe('auto.ai.langchain');
+  expect(eventData.contexts?.trace?.data).toMatchObject({
+    'gen_ai.operation.name': 'chat',
+    'gen_ai.system': 'anthropic',
+    'gen_ai.request.model': 'claude-3-haiku-20240307',
+    'gen_ai.request.temperature': 0.7,
+    'gen_ai.response.model': 'claude-3-haiku-20240307',
+    'gen_ai.response.id': 'msg_mock123',
+    'gen_ai.usage.input_tokens': 10,
+    'gen_ai.usage.output_tokens': 15,
+    'gen_ai.usage.total_tokens': 25,
+  });
+});
diff --git a/dev-packages/browser-integration-tests/utils/generatePlugin.ts b/dev-packages/browser-integration-tests/utils/generatePlugin.ts
index 548dfb2a6150..b1b1df410ca3 100644
--- a/dev-packages/browser-integration-tests/utils/generatePlugin.ts
+++ b/dev-packages/browser-integration-tests/utils/generatePlugin.ts
@@ -41,6 +41,7 @@ const IMPORTED_INTEGRATION_CDN_BUNDLE_PATHS: Record<string, string> = {
   instrumentOpenAiClient: 'instrumentopenaiclient',
   instrumentGoogleGenAIClient: 'instrumentgooglegenaiclient',
   instrumentLangGraph: 'instrumentlanggraph',
+  createLangChainCallbackHandler: 'createlangchaincallbackhandler',
   // technically, this is not an integration, but let's add it anyway for simplicity
   makeMultiplexedTransport: 'multiplexedtransport',
 };
diff --git a/packages/browser/rollup.bundle.config.mjs b/packages/browser/rollup.bundle.config.mjs
index 13bdee685821..57f1bd80b748 100644
--- a/packages/browser/rollup.bundle.config.mjs
+++ b/packages/browser/rollup.bundle.config.mjs
@@ -17,6 +17,7 @@ const reexportedPluggableIntegrationFiles = [
   'instrumentopenaiclient',
   'instrumentgooglegenaiclient',
   'instrumentlanggraph',
+  'createlangchaincallbackhandler',
 ];
 
 browserPluggableIntegrationFiles.forEach(integrationName => {
diff --git a/packages/browser/src/index.ts b/packages/browser/src/index.ts
index a58714f312d7..149068892f54 100644
--- a/packages/browser/src/index.ts
+++ b/packages/browser/src/index.ts
@@ -67,6 +67,7 @@ export {
   instrumentOpenAiClient,
   instrumentGoogleGenAIClient,
   instrumentLangGraph,
+  createLangChainCallbackHandler,
   logger,
 } from '@sentry/core';
 export type { Span, FeatureFlagsIntegration } from '@sentry/core';
diff --git a/packages/browser/src/integrations-bundle/index.createlangchaincallbackhandler.ts b/packages/browser/src/integrations-bundle/index.createlangchaincallbackhandler.ts
new file mode 100644
index 000000000000..8a64eda45579
--- /dev/null
+++ b/packages/browser/src/integrations-bundle/index.createlangchaincallbackhandler.ts
@@ -0,0 +1 @@
+export { createLangChainCallbackHandler } from '@sentry/core';
diff --git a/packages/browser/src/utils/lazyLoadIntegration.ts b/packages/browser/src/utils/lazyLoadIntegration.ts
index 8eeb6b95b66b..8a6688fe5953 100644
--- a/packages/browser/src/utils/lazyLoadIntegration.ts
+++ b/packages/browser/src/utils/lazyLoadIntegration.ts
@@ -25,6 +25,7 @@ const LazyLoadableIntegrations = {
   instrumentOpenAiClient: 'instrumentopenaiclient',
   instrumentGoogleGenAIClient: 'instrumentgooglegenaiclient',
   instrumentLangGraph: 'instrumentlanggraph',
+  createLangChainCallbackHandler: 'createlangchaincallbackhandler',
 } as const;
 
 const WindowWithMaybeIntegration = WINDOW as {
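
Usage note (not part of the patch): a minimal sketch of how the new export would be wired up against a real chat model, mirroring subject.js above. The ChatAnthropic client from @langchain/anthropic is an assumption standing in for the mock, and __YOUR_DSN__ is a placeholder; the Sentry pieces (createLangChainCallbackHandler and its recordInputs/recordOutputs options) are what this PR exports.

// sketch.js — hypothetical consumer code, not included in this PR
import * as Sentry from '@sentry/browser';
import { ChatAnthropic } from '@langchain/anthropic'; // assumed dependency, replaces MockChatAnthropic

Sentry.init({ dsn: '__YOUR_DSN__', tracesSampleRate: 1 });

// Create the handler once and reuse it across invocations;
// with both options false, prompts and completions stay off the spans.
const sentryHandler = Sentry.createLangChainCallbackHandler({
  recordInputs: false,
  recordOutputs: false,
});

const model = new ChatAnthropic({ model: 'claude-3-haiku-20240307', temperature: 0.7 });

// Passing the handler via the invocation config is what drives the
// handleChatModelStart/handleLLMEnd callbacks that open and close the
// gen_ai.chat span asserted in test.ts.
const response = await model.invoke('What is the capital of France?', {
  callbacks: [sentryHandler],
});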