Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/forward-fetch-headers-options.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"@browserbasehq/stagehand": patch
---

fix: forward fetch and headers options to AI SDK providers to enable proxy authentication, request logging, and custom retry logic
82 changes: 82 additions & 0 deletions packages/core/examples/test-custom-fetch.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
import { Stagehand } from "../lib/v3";

/**
* Test script to verify custom fetch and headers are forwarded to AI SDK providers
*
* This demonstrates the fix for the bug where custom fetch functions and headers
* were being silently ignored when using AI SDK providers (e.g., "openai/gpt-4o-mini").
*
* Expected behavior:
* - Custom fetch function should be called for all LLM API requests
* - Custom headers should be included in the requests
* - This enables use cases like: proxy authentication, request logging, retry logic
*/

/**
 * Drives the end-to-end check: install a counting/logging fetch wrapper,
 * run one act() call through Stagehand, then report whether the wrapper
 * was ever invoked.
 *
 * Fixes over the original:
 * - sets `process.exitCode = 1` on FAILURE and on error, so CI actually
 *   fails when the custom fetch is not forwarded (previously always exit 0)
 * - guards `context.pages()[0]` instead of dereferencing it blindly
 */
async function main() {
  // Count invocations of the custom fetch so we can verify it is really
  // used for LLM traffic, and collect any custom headers we observe.
  let fetchCallCount = 0;
  const customHeaders: string[] = [];

  // Custom fetch wrapper: records the call, logs X-Custom-* headers,
  // then delegates to the real global fetch.
  const customFetch: typeof fetch = async (url, options) => {
    fetchCallCount++;
    console.log(`✅ Custom fetch called (${fetchCallCount} times)`);
    console.log(`  URL: ${url}`);

    // Log custom headers if present
    if (options?.headers) {
      const headers = new Headers(options.headers);
      headers.forEach((value, key) => {
        if (key.toLowerCase().startsWith('x-custom')) {
          customHeaders.push(`${key}: ${value}`);
          console.log(`  Custom header: ${key}: ${value}`);
        }
      });
    }

    return fetch(url, options);
  };

  // Initialize Stagehand with custom fetch and headers
  console.log("Initializing Stagehand with custom fetch and headers...\n");

  const stagehand = new Stagehand({
    model: {
      modelName: "openai/gpt-4o-mini",
      apiKey: process.env.OPENAI_API_KEY,
      fetch: customFetch,
      headers: {
        "X-Custom-Header": "test-value",
        "X-Custom-Proxy-Auth": "proxy-token-123"
      }
      // Cast needed: `fetch`/`headers` are not (yet) part of the declared
      // model-options type; this script exercises their runtime forwarding.
    } as any,
    env: "LOCAL"
  });

  await stagehand.init();

  try {
    console.log("Making a simple LLM call via act()...\n");

    // Navigate to a simple page — guard against an empty page list.
    const page = stagehand.context.pages()[0];
    if (!page) {
      throw new Error("No page available in the Stagehand context");
    }
    await page.goto("https://example.com");

    // Make an act() call that will use the LLM
    await stagehand.act("find the heading on the page");

    console.log("\n=== Test Results ===");
    if (fetchCallCount > 0) {
      console.log(`✅ SUCCESS: Custom fetch was called ${fetchCallCount} times`);
      console.log(`✅ Custom headers detected: ${customHeaders.length > 0 ? customHeaders.join(", ") : "None (may be overridden by SDK)"}`);
    } else {
      console.log("❌ FAILURE: Custom fetch was NOT called");
      console.log("  This indicates the bug still exists.");
      // Propagate the failure to the shell/CI instead of exiting 0.
      process.exitCode = 1;
    }
  } catch (error) {
    console.error("\n❌ Error during test:", error);
    process.exitCode = 1;
  } finally {
    await stagehand.close();
  }
}

main().catch(console.error);
65 changes: 41 additions & 24 deletions packages/core/lib/v3/llm/LLMProvider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,11 @@ import { GoogleClient } from "./GoogleClient";
import { GroqClient } from "./GroqClient";
import { LLMClient } from "./LLMClient";
import { OpenAIClient } from "./OpenAIClient";

/**
 * Optional transport options that may accompany the standard client options.
 * Both fields are forwarded to the AI SDK provider factories (see
 * getAISDKLanguageModel), enabling use cases such as proxy authentication
 * headers or a custom fetch implementation for request logging/retries.
 */
interface ExtendedClientOptions {
  // Extra HTTP headers to attach to every provider request.
  headers?: Record<string, string>;
  // Replacement fetch implementation used by the provider for all requests.
  fetch?: typeof globalThis.fetch;
}
import { openai, createOpenAI } from "@ai-sdk/openai";
import { anthropic, createAnthropic } from "@ai-sdk/anthropic";
import { google, createGoogleGenerativeAI } from "@ai-sdk/google";
Expand Down Expand Up @@ -98,33 +103,43 @@ export function getAISDKLanguageModel(
subModelName: string,
apiKey?: string,
baseURL?: string,
headers?: Record<string, string>,
fetch?: typeof globalThis.fetch,
) {
const creator = AISDKProvidersWithAPIKey[subProvider];
if (!creator) {
throw new UnsupportedAISDKModelProviderError(
subProvider,
Object.keys(AISDKProvidersWithAPIKey),
);
}

// Build provider config - all fields are optional
// When apiKey is not provided, creator functions automatically use environment variables
const providerConfig: {
apiKey?: string;
baseURL?: string;
headers?: Record<string, string>;
fetch?: typeof globalThis.fetch;
} = {};

if (apiKey) {
const creator = AISDKProvidersWithAPIKey[subProvider];
if (!creator) {
throw new UnsupportedAISDKModelProviderError(
subProvider,
Object.keys(AISDKProvidersWithAPIKey),
);
}
// Create the provider instance with the API key and baseURL if provided
const providerConfig: { apiKey: string; baseURL?: string } = { apiKey };
if (baseURL) {
providerConfig.baseURL = baseURL;
}
const provider = creator(providerConfig);
// Get the specific model from the provider
return provider(subModelName);
} else {
const provider = AISDKProviders[subProvider];
if (!provider) {
throw new UnsupportedAISDKModelProviderError(
subProvider,
Object.keys(AISDKProviders),
);
}
return provider(subModelName);
providerConfig.apiKey = apiKey;
}
if (baseURL) {
providerConfig.baseURL = baseURL;
}
if (headers) {
providerConfig.headers = headers;
}
if (fetch) {
providerConfig.fetch = fetch;
}

// Type assertion needed: AI SDK types require apiKey, but runtime accepts optional apiKey
// At runtime, when apiKey is not provided, creators automatically use environment variables
const provider = creator(providerConfig as { apiKey: string });
return provider(subModelName);
}

export class LLMProvider {
Expand All @@ -148,6 +163,8 @@ export class LLMProvider {
subModelName,
clientOptions?.apiKey,
clientOptions?.baseURL,
(clientOptions as ExtendedClientOptions)?.headers,
(clientOptions as ExtendedClientOptions)?.fetch,
);

return new AISdkClient({
Expand Down