Skip to content

Commit 22f935b

Browse files
committed
do not split message into two parts
1 parent 10aeb70 commit 22f935b

File tree

4 files changed

+10
-43
lines changed

4 files changed

+10
-43
lines changed

common/src/util/messages.ts

Lines changed: 6 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,11 @@ export function withoutCacheControl<
5050
>(obj: T): T {
5151
const wrapper = cloneDeep(obj)
5252

53-
for (const provider of ['anthropic', 'openrouter', 'openaiCompatible'] as const) {
53+
for (const provider of [
54+
'anthropic',
55+
'openrouter',
56+
'openaiCompatible',
57+
] as const) {
5458
if (has(wrapper.providerOptions?.[provider]?.cache_control, 'type')) {
5559
delete wrapper.providerOptions?.[provider]?.cache_control?.type
5660
}
@@ -305,14 +309,7 @@ export function convertCbToModelMessages({
305309

306310
prevMessage.content = [
307311
...contentBlock.slice(0, lastContentIndex),
308-
{
309-
...lastContentPart,
310-
text: lastContentPart.text.slice(0, 1),
311-
},
312-
withCacheControl({
313-
...lastContentPart,
314-
text: lastContentPart.text.slice(1),
315-
}),
312+
withCacheControl(lastContentPart),
316313
...contentBlock.slice(lastContentIndex + 1),
317314
] as typeof contentBlock
318315

packages/internal/src/openai-compatible/chat/convert-to-openai-compatible-chat-messages.ts

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -26,15 +26,6 @@ export function convertToOpenAICompatibleChatMessages(
2626
}
2727

2828
case 'user': {
29-
if (content.length === 1 && content[0].type === 'text') {
30-
messages.push({
31-
role: 'user',
32-
content: content[0].text,
33-
...getOpenAIMetadata(content[0]),
34-
})
35-
break
36-
}
37-
3829
messages.push({
3930
role: 'user',
4031
content: content.map((part) => {

packages/internal/src/openrouter-ai-sdk/chat/convert-to-openrouter-chat-messages.ts

Lines changed: 0 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -47,27 +47,6 @@ export function convertToOpenRouterChatMessages(
4747
}
4848

4949
case 'user': {
50-
if (content.length === 1 && content[0]?.type === 'text') {
51-
const cacheControl =
52-
getCacheControl(providerOptions) ??
53-
getCacheControl(content[0].providerOptions)
54-
const contentWithCacheControl: string | ChatCompletionContentPart[] =
55-
cacheControl
56-
? [
57-
{
58-
type: 'text',
59-
text: content[0].text,
60-
cache_control: cacheControl,
61-
},
62-
]
63-
: content[0].text
64-
messages.push({
65-
role: 'user',
66-
content: contentWithCacheControl,
67-
})
68-
break
69-
}
70-
7150
// Get message level cache control
7251
const messageCacheControl = getCacheControl(providerOptions)
7352
const contentParts: ChatCompletionContentPart[] = content.map(

sdk/src/impl/llm.ts

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,5 @@
11
import path from 'path'
22

3-
import {
4-
OpenAICompatibleChatLanguageModel,
5-
VERSION,
6-
} from '@ai-sdk/openai-compatible'
73
import {
84
checkLiveUserInput,
95
getLiveUserInputIds,
@@ -14,6 +10,10 @@ import { getErrorObject } from '@codebuff/common/util/error'
1410
import { convertCbToModelMessages } from '@codebuff/common/util/messages'
1511
import { isExplicitlyDefinedModel } from '@codebuff/common/util/model-utils'
1612
import { StopSequenceHandler } from '@codebuff/common/util/stop-sequence'
13+
import {
14+
OpenAICompatibleChatLanguageModel,
15+
VERSION,
16+
} from '@codebuff/internal/openai-compatible/index'
1717
import { streamText, APICallError, generateText, generateObject } from 'ai'
1818

1919
import { WEBSITE_URL } from '../constants'

0 commit comments

Comments (0)