Skip to content

Commit 5458cb0

Browse files
committed
chore: response streaming
1 parent 9d30a63 commit 5458cb0

File tree

2 files changed

+31
-46
lines changed

2 files changed

+31
-46
lines changed

snippets/ai/src/helpers.ts

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,10 @@ export class LoadingAnimation {
1616
this.message = message;
1717
}
1818

19+
public get isRunning(): boolean {
20+
return this.interval !== null;
21+
}
22+
1923
start(signal: AbortSignal): void {
2024
if (this.interval) {
2125
return;

snippets/ai/src/providers/ai-provider.ts

Lines changed: 27 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -251,30 +251,14 @@ export class AiProvider {
251251
messages: ModelMessage[],
252252
{ systemPrompt, signal }: Omit<GetResponseOptions, 'expectedOutput'>,
253253
): Promise<string> {
254-
// Use streaming for docs provider, generateText for others
255-
if (this.activeProvider === 'docs') {
256-
const result = streamText({
257-
model: this.model,
258-
messages: messages,
259-
system: systemPrompt,
260-
abortSignal: signal ?? AbortSignal.timeout(30_000),
261-
});
262-
263-
let fullText = '';
264-
for await (const chunk of result.textStream) {
265-
fullText += chunk;
266-
}
267-
return fullText;
268-
} else {
269-
const { text } = await generateText({
270-
model: this.model,
254+
const { text } = await generateText({
255+
model: this.model,
271256

272-
messages: messages,
273-
system: systemPrompt,
274-
abortSignal: signal ?? AbortSignal.timeout(30_000),
275-
});
276-
return text;
277-
}
257+
messages: messages,
258+
system: systemPrompt,
259+
abortSignal: signal ?? AbortSignal.timeout(30_000),
260+
});
261+
return text;
278262
}
279263

280264
async processResponse(
@@ -291,27 +275,26 @@ export class AiProvider {
291275
try {
292276
let text: string;
293277

294-
// Use streaming for docs provider, generateText for others
295-
if (this.activeProvider === 'docs') {
296-
const result = streamText({
297-
model: this.model,
298-
messages: this.session.messages,
299-
system: systemPrompt,
300-
abortSignal: signal,
301-
});
278+
// Use streaming for all providers to show output as it arrives
279+
const result = streamText({
280+
model: this.model,
281+
messages: this.session.messages,
282+
system: systemPrompt,
283+
abortSignal: signal,
284+
});
285+
286+
text = '';
302287

303-
text = '';
304-
for await (const chunk of result.textStream) {
305-
text += chunk;
288+
for await (const delta of result.textStream) {
289+
if (this.thinking.isRunning) {
290+
this.thinking.stop();
291+
process.stdout.write(chalk.bold.blue('Response: '));
292+
}
293+
text += delta;
294+
// Output each chunk as it arrives for 'response' mode
295+
if (expectedOutput === 'response') {
296+
process.stdout.write(delta);
306297
}
307-
} else {
308-
const result = await generateText({
309-
model: this.model,
310-
messages: this.session.messages,
311-
system: systemPrompt,
312-
abortSignal: signal,
313-
});
314-
text = result.text;
315298
}
316299

317300
this.session.messages.push({
@@ -322,13 +305,11 @@ export class AiProvider {
322305
switch (expectedOutput) {
323306
case 'command':
324307
this.setInput(
325-
this.formatResponse({ response: text, expectedOutput: 'response' }),
308+
this.formatResponse({ response: text, expectedOutput: 'command' }),
326309
);
327310
break;
328311
case 'response':
329-
this.respond(
330-
this.formatResponse({ response: text, expectedOutput: 'response' }),
331-
);
312+
// Text already streamed to stdout, no need to call respond
332313
break;
333314
}
334315
} catch (error) {

0 commit comments

Comments (0)