Commit 0598fc9

🤖 feat: add OpenRouter provider support
Add official OpenRouter provider integration for access to 300+ models through a single API. Fixes errors that occurred when using the baseURL override approach.

Changes:
- Install @openrouter/ai-sdk-provider package
- Add OpenRouter to aiService.ts createModel method
- Add OpenRouterProviderOptions to provider types
- Add OPENROUTER_API_KEY environment variable support
- Update docs/models.md with OpenRouter setup guide

OpenRouter provides:
- Universal model access (Anthropic, OpenAI, Google, Cerebras, etc.)
- Pay-as-you-go pricing with transparent per-token costs
- High availability with automatic failover
- Immediate access to new models

Usage:
- openrouter:anthropic/claude-3.5-sonnet
- openrouter:google/gemini-2.0-flash-thinking-exp
- openrouter:cerebras/glm-4.6
- openrouter:deepseek/deepseek-chat

_Generated with `cmux`_
1 parent bb2c785 commit 0598fc9

7 files changed: +79 −1 lines changed

.env.example

Lines changed: 1 addition & 0 deletions
@@ -4,6 +4,7 @@
 # Required for integration tests when TEST_INTEGRATION=1
 ANTHROPIC_API_KEY=sk-ant-...
 OPENAI_API_KEY=sk-proj-...
+OPENROUTER_API_KEY=sk-or-v1-...
 
 # Optional: Set to 1 to run integration tests
 # Integration tests require API keys to be set

bun.lock

Lines changed: 3 additions & 0 deletions
@@ -6,6 +6,7 @@
     "dependencies": {
       "@ai-sdk/anthropic": "^2.0.29",
       "@ai-sdk/openai": "^2.0.52",
+      "@openrouter/ai-sdk-provider": "^1.2.1",
       "@radix-ui/react-dialog": "^1.1.15",
       "@radix-ui/react-dropdown-menu": "^2.1.16",
       "@radix-ui/react-scroll-area": "^1.2.10",
@@ -405,6 +406,8 @@
 
     "@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="],
 
+    "@openrouter/ai-sdk-provider": ["@openrouter/ai-sdk-provider@1.2.1", "", { "peerDependencies": { "ai": "^5.0.0", "zod": "^3.24.1 || ^v4" } }, "sha512-sDc+/tlEM9VTsYlZ3YMwD9AHinSNusdLFGQhtb50eo5r68U/yBixEHRsKEevqSspiX3V6J06hU7C25t4KE9iag=="],
+
     "@opentelemetry/api": ["@opentelemetry/api@1.9.0", "", {}, "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg=="],
 
     "@pkgjs/parseargs": ["@pkgjs/parseargs@0.11.0", "", {}, "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="],

docs/models.md

Lines changed: 34 additions & 0 deletions
@@ -27,6 +27,36 @@ GPT-5 family of models:
 
 TODO: add issue link here.
 
+#### OpenRouter (Cloud)
+
+Access 300+ models from multiple providers through a single API:
+
+- `openrouter:anthropic/claude-3.5-sonnet`
+- `openrouter:google/gemini-2.0-flash-thinking-exp`
+- `openrouter:deepseek/deepseek-chat`
+- `openrouter:openai/gpt-4o`
+- Any model from [OpenRouter Models](https://openrouter.ai/models)
+
+**Setup:**
+
+1. Get your API key from [openrouter.ai](https://openrouter.ai/)
+2. Add to `~/.cmux/providers.jsonc`:
+
+```jsonc
+{
+  "openrouter": {
+    "apiKey": "sk-or-v1-...",
+  },
+}
+```
+
+**Benefits:**
+
+- Single API key for hundreds of models
+- Pay-as-you-go pricing with no monthly fees
+- Transparent per-token costs
+- Automatic failover for high availability
+
 #### Ollama (Local)
 
 Run models locally with Ollama. No API key required:
@@ -68,6 +98,10 @@ All providers are configured in `~/.cmux/providers.jsonc`. Example configuration
   "openai": {
     "apiKey": "sk-...",
   },
+  // Required for OpenRouter models
+  "openrouter": {
+    "apiKey": "sk-or-v1-...",
+  },
   // Optional for Ollama (only needed for custom URL)
   "ollama": {
     "baseUrl": "http://your-server:11434/api",

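To illustrate the `openrouter:<author>/<model>` naming convention documented above, here is a minimal sketch of splitting such a string into a provider name and model id. This is not cmux's actual parser (which is not shown in this diff); `parseModelString` is a hypothetical helper for illustration only.

```typescript
// Illustrative only: split an "openrouter:author/model" string into its parts.
// cmux's real model-string handling is not part of this commit.
function parseModelString(input: string): { providerName: string; modelId: string } {
  const separator = input.indexOf(":");
  if (separator === -1) {
    throw new Error(`Expected "<provider>:<model>", got "${input}"`);
  }
  return {
    providerName: input.slice(0, separator),
    modelId: input.slice(separator + 1),
  };
}

// parseModelString("openrouter:anthropic/claude-3.5-sonnet")
// -> { providerName: "openrouter", modelId: "anthropic/claude-3.5-sonnet" }
```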
package.json

Lines changed: 1 addition & 0 deletions
@@ -47,6 +47,7 @@
   "dependencies": {
     "@ai-sdk/anthropic": "^2.0.29",
     "@ai-sdk/openai": "^2.0.52",
+    "@openrouter/ai-sdk-provider": "^1.2.1",
     "@radix-ui/react-dialog": "^1.1.15",
     "@radix-ui/react-dropdown-menu": "^2.1.16",
     "@radix-ui/react-scroll-area": "^1.2.10",

src/services/aiService.ts

Lines changed: 25 additions & 0 deletions
@@ -105,6 +105,7 @@ export async function preloadAISDKProviders(): Promise<void> {
     import("@ai-sdk/anthropic"),
     import("@ai-sdk/openai"),
     import("ollama-ai-provider-v2"),
+    import("@openrouter/ai-sdk-provider"),
   ]);
 }
 
@@ -416,6 +417,30 @@ export class AIService extends EventEmitter {
       return Ok(provider(modelId));
     }
 
+    // Handle OpenRouter provider
+    if (providerName === "openrouter") {
+      if (!providerConfig.apiKey) {
+        return Err({
+          type: "api_key_not_found",
+          provider: providerName,
+        });
+      }
+      // Use custom fetch if provided, otherwise default with unlimited timeout
+      const baseFetch =
+        typeof providerConfig.fetch === "function"
+          ? (providerConfig.fetch as typeof fetch)
+          : defaultFetchWithUnlimitedTimeout;
+
+      // Lazy-load OpenRouter provider to reduce startup time
+      const { createOpenRouter } = await import("@openrouter/ai-sdk-provider");
+      const provider = createOpenRouter({
+        ...providerConfig,
+        // eslint-disable-next-line @typescript-eslint/no-explicit-any, @typescript-eslint/no-unsafe-assignment
+        fetch: baseFetch as any,
+      });
+      return Ok(provider(modelId));
+    }
+
     return Err({
       type: "provider_not_supported",
       provider: providerName,
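For context, the new branch ultimately hands the AI SDK a LanguageModel produced by `createOpenRouter(...)(modelId)`. Below is a minimal standalone sketch of that same call shape, outside of cmux: no `Ok`/`Err` result wrapping and no custom fetch wrapper, just the provider plus `generateText` from the `ai` package (the v5 peer dependency listed in bun.lock).

```typescript
// Minimal sketch, not cmux code: build an OpenRouter-backed model and run one request.
import { createOpenRouter } from "@openrouter/ai-sdk-provider";
import { generateText } from "ai";

async function main(): Promise<void> {
  const apiKey = process.env.OPENROUTER_API_KEY;
  if (!apiKey) {
    throw new Error("Set OPENROUTER_API_KEY first");
  }

  // Same shape as the diff above: createOpenRouter({ ... }) then provider(modelId).
  const openrouter = createOpenRouter({ apiKey });
  const model = openrouter("anthropic/claude-3.5-sonnet");

  const { text } = await generateText({
    model,
    prompt: "Reply with a single word: ping",
  });
  console.log(text);
}

main().catch(console.error);
```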

src/types/providerOptions.ts

Lines changed: 9 additions & 0 deletions
@@ -37,6 +37,14 @@ export interface OpenAIProviderOptions {
 // eslint-disable-next-line @typescript-eslint/no-empty-object-type
 export interface OllamaProviderOptions {}
 
+/**
+ * OpenRouter-specific options
+ * Currently empty - OpenRouter handles provider-specific options via extraBody.
+ * This interface is provided for future extensibility.
+ */
+// eslint-disable-next-line @typescript-eslint/no-empty-object-type
+export interface OpenRouterProviderOptions {}
+
 /**
  * Cmux provider options - used by both frontend and backend
  */
@@ -45,4 +53,5 @@ export interface CmuxProviderOptions {
   anthropic?: AnthropicProviderOptions;
   openai?: OpenAIProviderOptions;
   ollama?: OllamaProviderOptions;
+  openrouter?: OpenRouterProviderOptions;
 }
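The new doc comment notes that OpenRouter-specific knobs travel via `extraBody` rather than a typed options interface. The sketch below shows what that looks like with `@openrouter/ai-sdk-provider`'s `extraBody` setting; the routing fields inside it (`provider.sort`) are an assumption taken from OpenRouter's request options and should be verified against current docs.

```typescript
// Sketch only: pass OpenRouter-specific request fields through extraBody instead of
// extending OpenRouterProviderOptions. Field names inside extraBody are assumptions.
import { createOpenRouter } from "@openrouter/ai-sdk-provider";

const openrouter = createOpenRouter({
  apiKey: process.env.OPENROUTER_API_KEY ?? "",
  // Merged into every request body sent to OpenRouter.
  extraBody: {
    provider: { sort: "throughput" },
  },
});

// Models created from this instance inherit the extra body fields.
const model = openrouter("deepseek/deepseek-chat");
```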

src/utils/providers/ensureProvidersConfig.ts

Lines changed: 6 additions & 1 deletion
@@ -49,6 +49,11 @@ const buildProvidersFromEnv = (env: NodeJS.ProcessEnv): ProvidersConfig => {
     providers.openai = entry;
   }
 
+  const openRouterKey = trim(env.OPENROUTER_API_KEY);
+  if (openRouterKey.length > 0) {
+    providers.openrouter = { apiKey: openRouterKey };
+  }
+
   if (!providers.openai) {
     const azureKey = trim(env.AZURE_OPENAI_API_KEY);
     const azureEndpoint = trim(env.AZURE_OPENAI_ENDPOINT);
@@ -97,7 +102,7 @@ export const ensureProvidersConfig = (
   const providersFromEnv = buildProvidersFromEnv(env);
   if (!hasAnyConfiguredProvider(providersFromEnv)) {
     throw new Error(
-      "No provider credentials found. Configure providers.jsonc or set ANTHROPIC_API_KEY / OPENAI_API_KEY."
+      "No provider credentials found. Configure providers.jsonc or set ANTHROPIC_API_KEY / OPENAI_API_KEY / OPENROUTER_API_KEY."
     );
   }
 }
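The env fallback above means an `OPENROUTER_API_KEY` in the environment is enough to satisfy the credentials check even without a `providers.jsonc`. A self-contained sketch of that path follows; the type and helper names (`ProvidersConfig`, `trim`, `providersFromEnv`) are illustrative stand-ins, not the project's actual exports.

```typescript
// Minimal sketch (assumed names) of the env-to-config path this diff extends:
// OPENROUTER_API_KEY becomes an "openrouter" entry in the providers config.
interface ProviderEntry {
  apiKey: string;
}

type ProvidersConfig = Partial<Record<"anthropic" | "openai" | "openrouter", ProviderEntry>>;

const trim = (value: string | undefined): string => (value ?? "").trim();

function providersFromEnv(env: NodeJS.ProcessEnv): ProvidersConfig {
  const providers: ProvidersConfig = {};
  const openRouterKey = trim(env.OPENROUTER_API_KEY);
  if (openRouterKey.length > 0) {
    providers.openrouter = { apiKey: openRouterKey };
  }
  return providers;
}

// Example: OPENROUTER_API_KEY=sk-or-v1-... bun run script.ts
console.log(providersFromEnv(process.env));
```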
