Skip to content

Commit de270e2

Browse files
authored
feat(browser): Expose langchain instrumentation (#18342)
We are exposing the AI instrumentation methods for LangChain, enabling users to use them directly from the browser SDK.
1 parent b822f8f commit de270e2

File tree

9 files changed

+149
-0
lines changed

9 files changed

+149
-0
lines changed
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
import * as Sentry from '@sentry/browser';

// Expose the SDK on the global object so the test page can reach it.
window.Sentry = Sentry;

// Initialize with tracing fully sampled and debug logging enabled.
const sentryOptions = {
  dsn: 'https://public@dsn.ingest.sentry.io/1337',
  tracesSampleRate: 1,
  debug: true,
};

Sentry.init(sentryOptions);
Lines changed: 76 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,76 @@
1+
// Mock LangChain Chat Model for browser testing
export class MockChatAnthropic {
  /**
   * @param {{ model: string, temperature: number, maxTokens: number }} params
   *   Model settings; echoed back to callbacks as `invocation_params`.
   */
  constructor(params) {
    this._model = params.model;
    this._temperature = params.temperature;
    this._maxTokens = params.maxTokens;
  }

  /**
   * Simulates a chat-model invocation: fires `handleChatModelStart` on every
   * callback, waits briefly, then fires `handleLLMEnd` with a fixed result.
   *
   * @param messages - Prompt forwarded unchanged to `handleChatModelStart`.
   * @param config - Optional LangChain-style invocation config. `callbacks`
   *   may now be omitted: previously the parameter default
   *   `{ callbacks: [] }` only applied when `config` was omitted entirely,
   *   so passing e.g. `{}` crashed the `for..of` over `undefined`.
   * @returns A mock LLMResult-shaped object with fixed text and token usage.
   */
  async invoke(messages, config = {}) {
    // `?? []` guards a config object passed without a `callbacks` array.
    const callbacks = config.callbacks ?? [];
    const runId = 'mock-run-id-123';

    const invocationParams = {
      model: this._model,
      temperature: this._temperature,
      max_tokens: this._maxTokens,
    };

    // Minimal LangChain "serialized" representation of the model.
    const serialized = {
      lc: 1,
      type: 'constructor',
      id: ['langchain', 'anthropic', 'anthropic'],
      kwargs: invocationParams,
    };

    // Call handleChatModelStart
    for (const callback of callbacks) {
      if (callback.handleChatModelStart) {
        await callback.handleChatModelStart(
          serialized,
          messages,
          runId,
          undefined,
          undefined,
          { invocation_params: invocationParams },
          { ls_model_name: this._model, ls_provider: 'anthropic' },
        );
      }
    }

    // Simulate processing time
    await new Promise(resolve => setTimeout(resolve, 10));

    // Create mock result (fixed response text and token counts)
    const result = {
      generations: [
        [
          {
            text: 'Mock response from Anthropic!',
            generationInfo: {
              finish_reason: 'stop',
            },
          },
        ],
      ],
      llmOutput: {
        tokenUsage: {
          promptTokens: 10,
          completionTokens: 15,
          totalTokens: 25,
        },
        model_name: this._model,
        id: 'msg_mock123',
      },
    };

    // Call handleLLMEnd
    for (const callback of callbacks) {
      if (callback.handleLLMEnd) {
        await callback.handleLLMEnd(result, runId);
      }
    }

    return result;
  }
}
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
import { createLangChainCallbackHandler } from '@sentry/browser';
import { MockChatAnthropic } from './mocks.js';

// Sentry LangChain handler with input/output recording disabled.
const sentryHandler = createLangChainCallbackHandler({ recordInputs: false, recordOutputs: false });

// Mock Anthropic chat model under test.
const model = new MockChatAnthropic({ model: 'claude-3-haiku-20240307', temperature: 0.7, maxTokens: 100 });

// Smoke test: manual instrumentation must not crash in the browser; spans are
// created automatically by the instrumentation. Callbacks are provided via the
// invocation config object:
// https://docs.langchain.com/oss/python/langchain/models#invocation-config
const result = await model.invoke('What is the capital of France?', { callbacks: [sentryHandler] });

console.log('Received response', result);
Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
import { expect } from '@playwright/test';
2+
import { sentryTest } from '../../../../utils/fixtures';
3+
import { envelopeRequestParser, waitForTransactionRequest } from '../../../../utils/helpers';
4+
5+
// These tests are not exhaustive because the instrumentation is
6+
// already tested in the node integration tests and we merely
7+
// want to test that the instrumentation does not crash in the browser
8+
// and that gen_ai transactions are sent.
9+
10+
sentryTest('manual LangChain instrumentation sends gen_ai transactions', async ({ getLocalTestUrl, page }) => {
11+
const transactionPromise = waitForTransactionRequest(page, event => {
12+
return !!event.transaction?.includes('claude-3-haiku-20240307');
13+
});
14+
15+
const url = await getLocalTestUrl({ testDir: __dirname });
16+
await page.goto(url);
17+
18+
const req = await transactionPromise;
19+
20+
const eventData = envelopeRequestParser(req);
21+
22+
// Verify it's a gen_ai transaction
23+
expect(eventData.transaction).toBe('chat claude-3-haiku-20240307');
24+
expect(eventData.contexts?.trace?.op).toBe('gen_ai.chat');
25+
expect(eventData.contexts?.trace?.origin).toBe('auto.ai.langchain');
26+
expect(eventData.contexts?.trace?.data).toMatchObject({
27+
'gen_ai.operation.name': 'chat',
28+
'gen_ai.system': 'anthropic',
29+
'gen_ai.request.model': 'claude-3-haiku-20240307',
30+
'gen_ai.request.temperature': 0.7,
31+
'gen_ai.response.model': 'claude-3-haiku-20240307',
32+
'gen_ai.response.id': 'msg_mock123',
33+
'gen_ai.usage.input_tokens': 10,
34+
'gen_ai.usage.output_tokens': 15,
35+
'gen_ai.usage.total_tokens': 25,
36+
});
37+
});

dev-packages/browser-integration-tests/utils/generatePlugin.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,7 @@ const IMPORTED_INTEGRATION_CDN_BUNDLE_PATHS: Record<string, string> = {
4141
instrumentOpenAiClient: 'instrumentopenaiclient',
4242
instrumentGoogleGenAIClient: 'instrumentgooglegenaiclient',
4343
instrumentLangGraph: 'instrumentlanggraph',
44+
createLangChainCallbackHandler: 'createlangchaincallbackhandler',
4445
// technically, this is not an integration, but let's add it anyway for simplicity
4546
makeMultiplexedTransport: 'multiplexedtransport',
4647
};

packages/browser/rollup.bundle.config.mjs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@ const reexportedPluggableIntegrationFiles = [
1717
'instrumentopenaiclient',
1818
'instrumentgooglegenaiclient',
1919
'instrumentlanggraph',
20+
'createlangchaincallbackhandler',
2021
];
2122

2223
browserPluggableIntegrationFiles.forEach(integrationName => {

packages/browser/src/index.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -67,6 +67,7 @@ export {
6767
instrumentOpenAiClient,
6868
instrumentGoogleGenAIClient,
6969
instrumentLangGraph,
70+
createLangChainCallbackHandler,
7071
logger,
7172
} from '@sentry/core';
7273
export type { Span, FeatureFlagsIntegration } from '@sentry/core';
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
// CDN bundle entry point: re-export the LangChain callback handler factory.
import { createLangChainCallbackHandler } from '@sentry/core';

export { createLangChainCallbackHandler };

packages/browser/src/utils/lazyLoadIntegration.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ const LazyLoadableIntegrations = {
2525
instrumentOpenAiClient: 'instrumentopenaiclient',
2626
instrumentGoogleGenAIClient: 'instrumentgooglegenaiclient',
2727
instrumentLangGraph: 'instrumentlanggraph',
28+
createLangChainCallbackHandler: 'createlangchaincallbackhandler',
2829
} as const;
2930

3031
const WindowWithMaybeIntegration = WINDOW as {

0 commit comments

Comments
 (0)