diff --git a/packages/proxy/schema/index.ts b/packages/proxy/schema/index.ts index 24a8c290..32f68233 100644 --- a/packages/proxy/schema/index.ts +++ b/packages/proxy/schema/index.ts @@ -607,6 +607,7 @@ export const AvailableEndpointTypes: { [name: string]: ModelEndpointType[] } = { "grok-2-1212": ["xAI"], "grok-vision-beta": ["xAI"], "grok-beta": ["xAI"], + "deepseek.v3.2": ["bedrock"], "amazon.nova-premier-v1:0": ["bedrock"], "amazon.nova-2-lite-v1:0": ["bedrock"], "magistral-medium-2509": ["mistral"], diff --git a/packages/proxy/schema/model_list.json b/packages/proxy/schema/model_list.json index 6c9c4552..c25ecf93 100644 --- a/packages/proxy/schema/model_list.json +++ b/packages/proxy/schema/model_list.json @@ -11443,6 +11443,18 @@ "baseten" ] }, + "deepseek.v3.2": { + "format": "converse", + "flavor": "chat", + "input_cost_per_mil_tokens": 0.62, + "output_cost_per_mil_tokens": 1.85, + "displayName": "DeepSeek V3.2", + "max_input_tokens": 164000, + "max_output_tokens": 8000, + "available_providers": [ + "bedrock" + ] + }, "LiquidAI/LFM2-24B-A2B": { "format": "openai", "flavor": "chat",