@@ -0,0 +1,9 @@
import * as Sentry from '@sentry/browser';

window.Sentry = Sentry;

Sentry.init({
  dsn: 'https://public@dsn.ingest.sentry.io/1337',
  tracesSampleRate: 1,
  debug: true,
});
@@ -0,0 +1,76 @@
// Mock LangChain Chat Model for browser testing
export class MockChatAnthropic {
  constructor(params) {
    this._model = params.model;
    this._temperature = params.temperature;
    this._maxTokens = params.maxTokens;
  }

  async invoke(messages, config = { callbacks: [] }) {
    // Guard against a config object that omits `callbacks`
    const callbacks = config.callbacks ?? [];
    const runId = 'mock-run-id-123';

    const invocationParams = {
      model: this._model,
      temperature: this._temperature,
      max_tokens: this._maxTokens,
    };

    const serialized = {
      lc: 1,
      type: 'constructor',
      id: ['langchain', 'anthropic', 'anthropic'],
      kwargs: invocationParams,
    };

    // Call handleChatModelStart on every registered callback
    for (const callback of callbacks) {
      if (callback.handleChatModelStart) {
        await callback.handleChatModelStart(
          serialized,
          messages,
          runId,
          undefined,
          undefined,
          { invocation_params: invocationParams },
          { ls_model_name: this._model, ls_provider: 'anthropic' },
        );
      }
    }

    // Simulate processing time
    await new Promise(resolve => setTimeout(resolve, 10));

    // Create mock result
    const result = {
      generations: [
        [
          {
            text: 'Mock response from Anthropic!',
            generationInfo: {
              finish_reason: 'stop',
            },
          },
        ],
      ],
      llmOutput: {
        tokenUsage: {
          promptTokens: 10,
          completionTokens: 15,
          totalTokens: 25,
        },
        model_name: this._model,
        id: 'msg_mock123',
      },
    };

    // Call handleLLMEnd on every registered callback
    for (const callback of callbacks) {
      if (callback.handleLLMEnd) {
        await callback.handleLLMEnd(result, runId);
      }
    }

    return result;
  }
}
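For reference, the mock only requires that a handler expose the LangChain callback method names it invokes. A minimal logging handler, purely illustrative and not part of this change, could look like this:

// Hypothetical handler, for illustration only: the method names and argument
// order mirror the calls MockChatAnthropic makes above.
export const loggingHandler = {
  handleChatModelStart(serialized, messages, runId, _parentRunId, _tags, extra, metadata) {
    console.log('chat start', runId, metadata?.ls_model_name, extra?.invocation_params);
  },
  handleLLMEnd(result, runId) {
    console.log('chat end', runId, result.llmOutput?.tokenUsage);
  },
};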
@@ -0,0 +1,22 @@
import { createLangChainCallbackHandler } from '@sentry/browser';
import { MockChatAnthropic } from './mocks.js';

const callbackHandler = createLangChainCallbackHandler({
  recordInputs: false,
  recordOutputs: false,
});

const chatModel = new MockChatAnthropic({
  model: 'claude-3-haiku-20240307',
  temperature: 0.7,
  maxTokens: 100,
});

// Test that manual instrumentation doesn't crash the browser.
// The instrumentation automatically creates spans.
// Callbacks can be provided via the config object:
// https://docs.langchain.com/oss/python/langchain/models#invocation-config
const response = await chatModel.invoke('What is the capital of France?', {
  callbacks: [callbackHandler],
});

console.log('Received response', response);
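The test deliberately disables input/output capture. Flipping the same two options (both appear in this diff) is how prompt and completion recording would be opted into; a minimal sketch:

// Sketch only: the same factory with capture opted in, so the handler may
// record prompts and completions on the spans it creates.
const verboseHandler = createLangChainCallbackHandler({
  recordInputs: true,
  recordOutputs: true,
});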
@@ -0,0 +1,37 @@
import { expect } from '@playwright/test';
import { sentryTest } from '../../../../utils/fixtures';
import { envelopeRequestParser, waitForTransactionRequest } from '../../../../utils/helpers';

// These tests are not exhaustive because the instrumentation is
// already tested in the node integration tests and we merely
// want to test that the instrumentation does not crash in the browser
// and that gen_ai transactions are sent.

sentryTest('manual LangChain instrumentation sends gen_ai transactions', async ({ getLocalTestUrl, page }) => {
  const transactionPromise = waitForTransactionRequest(page, event => {
    return !!event.transaction?.includes('claude-3-haiku-20240307');
  });

  const url = await getLocalTestUrl({ testDir: __dirname });
  await page.goto(url);

  const req = await transactionPromise;

  const eventData = envelopeRequestParser(req);

  // Verify it's a gen_ai transaction
  expect(eventData.transaction).toBe('chat claude-3-haiku-20240307');
  expect(eventData.contexts?.trace?.op).toBe('gen_ai.chat');
  expect(eventData.contexts?.trace?.origin).toBe('auto.ai.langchain');
  expect(eventData.contexts?.trace?.data).toMatchObject({
    'gen_ai.operation.name': 'chat',
    'gen_ai.system': 'anthropic',
    'gen_ai.request.model': 'claude-3-haiku-20240307',
    'gen_ai.request.temperature': 0.7,
    'gen_ai.response.model': 'claude-3-haiku-20240307',
    'gen_ai.response.id': 'msg_mock123',
    'gen_ai.usage.input_tokens': 10,
    'gen_ai.usage.output_tokens': 15,
    'gen_ai.usage.total_tokens': 25,
  });
});
@@ -41,6 +41,7 @@ const IMPORTED_INTEGRATION_CDN_BUNDLE_PATHS: Record<string, string> = {
  instrumentOpenAiClient: 'instrumentopenaiclient',
  instrumentGoogleGenAIClient: 'instrumentgooglegenaiclient',
  instrumentLangGraph: 'instrumentlanggraph',
  createLangChainCallbackHandler: 'createlangchaincallbackhandler',
  // technically, this is not an integration, but let's add it anyway for simplicity
  makeMultiplexedTransport: 'multiplexedtransport',
};
1 change: 1 addition & 0 deletions packages/browser/rollup.bundle.config.mjs
@@ -17,6 +17,7 @@ const reexportedPluggableIntegrationFiles = [
  'instrumentopenaiclient',
  'instrumentgooglegenaiclient',
  'instrumentlanggraph',
  'createlangchaincallbackhandler',
];

browserPluggableIntegrationFiles.forEach(integrationName => {
1 change: 1 addition & 0 deletions packages/browser/src/index.ts
@@ -67,6 +67,7 @@ export {
  instrumentOpenAiClient,
  instrumentGoogleGenAIClient,
  instrumentLangGraph,
  createLangChainCallbackHandler,
  logger,
} from '@sentry/core';
export type { Span, FeatureFlagsIntegration } from '@sentry/core';
@@ -0,0 +1 @@
export { createLangChainCallbackHandler } from '@sentry/core';
1 change: 1 addition & 0 deletions packages/browser/src/utils/lazyLoadIntegration.ts
@@ -25,6 +25,7 @@ const LazyLoadableIntegrations = {
  instrumentOpenAiClient: 'instrumentopenaiclient',
  instrumentGoogleGenAIClient: 'instrumentgooglegenaiclient',
  instrumentLangGraph: 'instrumentlanggraph',
  createLangChainCallbackHandler: 'createlangchaincallbackhandler',
} as const;

const WindowWithMaybeIntegration = WINDOW as {
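Registering the export in LazyLoadableIntegrations means it can also be fetched from the CDN at runtime. A minimal sketch, assuming Sentry.lazyLoadIntegration resolves to the re-exported factory once the createlangchaincallbackhandler bundle has loaded:

// Sketch only: lazyLoadIntegration is the existing browser API for entries in
// this map; that it hands back the factory function here is an assumption
// based on how the other non-integration exports above are consumed.
const createLangChainCallbackHandler = await Sentry.lazyLoadIntegration('createLangChainCallbackHandler');
const callbackHandler = createLangChainCallbackHandler({ recordInputs: false, recordOutputs: false });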