Skip to content

Commit d6578b7

Browse files
authored
update to gpt-5.1 (#2552)
1 parent 7f22eb0 commit d6578b7

File tree

8 files changed

+13
-11
lines changed

8 files changed

+13
-11
lines changed

cmd/testai/main-testai.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ var testSchemaJSON string
2525

2626
const (
2727
DefaultAnthropicModel = "claude-sonnet-4-5"
28-
DefaultOpenAIModel = "gpt-5"
28+
DefaultOpenAIModel = "gpt-5.1"
2929
)
3030

3131
// TestResponseWriter implements http.ResponseWriter and additional interfaces for testing

frontend/app/aipanel/aipanel-contextmenu.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ export async function handleWaveAIContextMenu(e: React.MouseEvent, showCopy: boo
5858
},
5959
},
6060
{
61-
label: hasPremium ? "Balanced (gpt-5, low thinking)" : "Balanced (premium)",
61+
label: hasPremium ? "Balanced (gpt-5.1, low thinking)" : "Balanced (premium)",
6262
type: "checkbox",
6363
checked: currentThinkingMode === "balanced",
6464
enabled: hasPremium,
@@ -71,7 +71,7 @@ export async function handleWaveAIContextMenu(e: React.MouseEvent, showCopy: boo
7171
},
7272
},
7373
{
74-
label: hasPremium ? "Deep (gpt-5, full thinking)" : "Deep (premium)",
74+
label: hasPremium ? "Deep (gpt-5.1, full thinking)" : "Deep (premium)",
7575
type: "checkbox",
7676
checked: currentThinkingMode === "deep",
7777
enabled: hasPremium,

frontend/app/aipanel/thinkingmode.tsx

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,13 +25,13 @@ const ThinkingModeData: Record<ThinkingMode, ThinkingModeMetadata> = {
2525
balanced: {
2626
icon: "fa-sparkles",
2727
name: "Balanced",
28-
desc: "Good mix of speed and accuracy\n(gpt-5 with minimal thinking)",
28+
desc: "Good mix of speed and accuracy\n(gpt-5.1 with minimal thinking)",
2929
premium: true,
3030
},
3131
deep: {
3232
icon: "fa-lightbulb",
3333
name: "Deep",
34-
desc: "Slower but most capable\n(gpt-5 with full reasoning)",
34+
desc: "Slower but most capable\n(gpt-5.1 with full reasoning)",
3535
premium: true,
3636
},
3737
};

frontend/app/aipanel/waveai-model.tsx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@ export class WaveAIModel {
8080

8181
this.modelAtom = jotai.atom((get) => {
8282
const modelMetaAtom = getOrefMetaKeyAtom(this.orefContext, "waveai:model");
83-
return get(modelMetaAtom) ?? "gpt-5";
83+
return get(modelMetaAtom) ?? "gpt-5.1";
8484
});
8585

8686
this.widgetAccessAtom = jotai.atom((get) => {

pkg/aiusechat/openai/openai-convertmessage.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -278,7 +278,7 @@ func buildOpenAIHTTPRequest(ctx context.Context, inputs []any, chatOpts uctypes.
278278
reqBody.Reasoning = &ReasoningType{
279279
Effort: opts.ThinkingLevel, // low, medium, high map directly
280280
}
281-
if opts.Model == "gpt-5" {
281+
if opts.Model == "gpt-5" || opts.Model == "gpt-5.1" {
282282
reqBody.Reasoning.Summary = "auto"
283283
}
284284
}

pkg/aiusechat/uctypes/usechat-types.go

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ import (
1212
const DefaultAIEndpoint = "https://cfapi.waveterm.dev/api/waveai"
1313
const DefaultAnthropicModel = "claude-sonnet-4-5"
1414
const DefaultOpenAIModel = "gpt-5-mini"
15-
const PremiumOpenAIModel = "gpt-5"
15+
const PremiumOpenAIModel = "gpt-5.1"
1616

1717
type UseChatRequest struct {
1818
Messages []UIMessage `json:"messages"`
@@ -234,7 +234,7 @@ func (opts AIOptsType) IsWaveProxy() bool {
234234
}
235235

236236
func (opts AIOptsType) IsPremiumModel() bool {
237-
return opts.Model == "gpt-5" || strings.Contains(opts.Model, "claude-sonnet")
237+
return opts.Model == "gpt-5" || opts.Model == "gpt-5.1" || strings.Contains(opts.Model, "claude-sonnet")
238238
}
239239

240240
type AIChat struct {
@@ -552,6 +552,7 @@ func AreModelsCompatible(apiType, model1, model2 string) bool {
552552

553553
if apiType == "openai" {
554554
gpt5Models := map[string]bool{
555+
"gpt-5.1": true,
555556
"gpt-5": true,
556557
"gpt-5-mini": true,
557558
"gpt-5-nano": true,

pkg/aiusechat/usechat.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -475,7 +475,7 @@ func RunAIChat(ctx context.Context, sseHandler *sse.SSEHandlerCh, chatOpts uctyp
475475
}
476476
}
477477
if stopReason != nil && stopReason.Kind == uctypes.StopKindPremiumRateLimit && chatOpts.Config.APIType == APIType_OpenAI && chatOpts.Config.Model == uctypes.PremiumOpenAIModel {
478-
log.Printf("Premium rate limit hit with gpt-5, switching to gpt-5-mini\n")
478+
log.Printf("Premium rate limit hit with gpt-5.1, switching to gpt-5-mini\n")
479479
cont = &uctypes.WaveContinueResponse{
480480
MessageID: "",
481481
Model: uctypes.DefaultOpenAIModel,

pkg/waveai/openaibackend.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,8 @@ func isReasoningModel(model string) bool {
3434
return strings.HasPrefix(m, "o1") ||
3535
strings.HasPrefix(m, "o3") ||
3636
strings.HasPrefix(m, "o4") ||
37-
strings.HasPrefix(m, "gpt-5")
37+
strings.HasPrefix(m, "gpt-5") ||
38+
strings.HasPrefix(m, "gpt-5.1")
3839
}
3940

4041
func setApiType(opts *wshrpc.WaveAIOptsType, clientConfig *openaiapi.ClientConfig) error {

0 commit comments

Comments (0)