Commit 00e3345

[ai] Make DurableAgent#stream() return a messages array (#362)
* Make `DurableAgent#stream()` return a `messages` array
* Docs updates
* Docs updates
* fix

---------

Co-authored-by: Pranay Prakash <pranay.gp@gmail.com>
1 parent 7a230a9 commit 00e3345
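For quick context on the API change, here is a caller-side sketch of the new return shape, adapted from the docs example added in this commit. `getWritable` and `UIMessageChunk` are used exactly as in that example and are assumed to be in scope (the docs snippet does not show their imports either).

```typescript
import { DurableAgent } from '@workflow/ai/agent';

async function exampleWorkflow() {
  'use workflow';

  const agent = new DurableAgent({
    model: 'anthropic/claude-haiku-4.5',
    tools: {}, // no tools needed for this sketch; the docs example below registers a searchProducts tool
  });

  // Before this change, stream() did not return the conversation; it now
  // resolves to { messages }: the full conversation history, including the
  // initial messages, assistant responses, and tool results.
  const { messages } = await agent.stream({
    messages: [{ role: 'user', content: 'Find me some laptops' }],
    writable: getWritable<UIMessageChunk>(), // helper and type as used in the docs example
  });

  return messages;
}
```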

File tree

- .changeset/eleven-roses-enter.md
- docs/content/docs/api-reference/workflow-ai/durable-agent.mdx
- packages/ai/src/agent/durable-agent.ts
- packages/ai/src/agent/stream-text-iterator.ts

4 files changed: +80 -2 lines changed

.changeset/eleven-roses-enter.md

Lines changed: 5 additions & 0 deletions
```diff
@@ -0,0 +1,5 @@
+---
+"@workflow/ai": patch
+---
+
+Make `DurableAgent#stream()` return a `messages` array
```

docs/content/docs/api-reference/workflow-ai/durable-agent.mdx

Lines changed: 53 additions & 0 deletions
````diff
@@ -86,6 +86,7 @@ export default DurableAgentStreamOptions;`}
 - Tools can be implemented as workflow steps (using `"use step"` for automatic retries), or as regular workflow-level logic
 - Tools can use core library features like `sleep()` and Hooks within their `execute` functions
 - The agent processes tool calls iteratively until completion
+- The `stream()` method returns `{ messages }` containing the full conversation history, including initial messages, assistant responses, and tool results
 
 ## Examples
 
@@ -180,6 +181,58 @@ async function multiToolAgentWorkflow(userQuery: string) {
 }
 ```
 
+### Multi-turn Conversation
+
+```typescript
+import { DurableAgent } from '@workflow/ai/agent';
+import { z } from 'zod';
+
+async function searchProducts({ query }: { query: string }) {
+  "use step";
+  // Search product database
+  return `Found 3 products matching "${query}"`;
+}
+
+async function multiTurnAgentWorkflow() {
+  'use workflow';
+
+  const agent = new DurableAgent({
+    model: 'anthropic/claude-haiku-4.5',
+    tools: {
+      searchProducts: {
+        description: 'Search for products',
+        inputSchema: z.object({ query: z.string() }),
+        execute: searchProducts,
+      },
+    },
+  });
+
+  const writable = getWritable<UIMessageChunk>();
+
+  // First user message
+  // - Result is streamed to the provided `writable` stream
+  // - Message history is returned in `messages` for LLM context
+  let { messages } = await agent.stream({
+    messages: [
+      { role: 'user', content: 'Find me some laptops' }
+    ],
+    writable,
+  });
+
+  // Continue the conversation with the accumulated message history
+  const result = await agent.stream({
+    messages: [
+      ...messages,
+      { role: 'user', content: 'Which one has the best battery life?' }
+    ],
+    writable,
+  });
+
+  // result.messages now contains the complete conversation history
+  return result.messages;
+}
+```
+
 ### Tools with Workflow Library Features
 
 ```typescript
````

packages/ai/src/agent/durable-agent.ts

Lines changed: 7 additions & 1 deletion
```diff
@@ -168,6 +168,12 @@ export class DurableAgent {
     if (sendFinish || !preventClose) {
       await closeStream(options.writable, preventClose, sendFinish);
     }
+
+    // The iterator returns the final conversation prompt (LanguageModelV2Prompt)
+    // which is compatible with ModelMessage[]
+    const messages = result.value as ModelMessage[];
+
+    return { messages };
   }
 }
 
@@ -215,7 +221,7 @@ async function executeTool(
   try {
     const toolResult = await tool.execute(input.value, {
       toolCallId: toolCall.toolCallId,
-      // TODO: pass the proper messages to the tool
+      // TODO: pass the proper messages to the tool (we'd need to pass them through the iterator)
       messages: [],
     });
 
```
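The `result.value` cast above relies on standard JavaScript generator semantics: when a generator is driven with `next()` until `done` is true, the final iterator result's `value` holds whatever the generator body `return`ed. A minimal standalone sketch with stand-in types (not the real `LanguageModelV2Prompt`/`ModelMessage` types):

```typescript
// Stand-in message type; the real code uses ModelMessage / LanguageModelV2Prompt.
type Message = { role: 'user' | 'assistant'; content: string };

function* conversationIterator(): Generator<string, Message[], void> {
  yield 'tool-call round 1';
  yield 'tool-call round 2';
  // The value passed to `return` becomes `result.value` on the final next()
  // call, i.e. the call where `result.done === true`.
  return [{ role: 'assistant', content: 'All done.' }];
}

const it = conversationIterator();
let result = it.next();
while (!result.done) {
  result = it.next();
}

// Mirrors `const messages = result.value as ModelMessage[]` in durable-agent.ts.
const messages = result.value as Message[];
console.log(messages); // [{ role: 'assistant', content: 'All done.' }]
```

The same rule applies to async generators such as `streamTextIterator`, which is why changing its declared return type (next diff) is enough for `result.value` to carry the conversation.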
packages/ai/src/agent/stream-text-iterator.ts

Lines changed: 15 additions & 1 deletion
```diff
@@ -25,7 +25,7 @@ export async function* streamTextIterator({
   sendStart?: boolean;
 }): AsyncGenerator<
   LanguageModelV2ToolCall[],
-  void,
+  LanguageModelV2Prompt,
   LanguageModelV2ToolResultPart[]
 > {
   const conversationPrompt = [...prompt]; // Create a mutable copy
@@ -78,11 +78,25 @@
         }
       }
     } else if (finish?.finishReason === 'stop') {
+      // Add assistant message with text content to the conversation
+      const textContent = step.content.filter(
+        (item) => item.type === 'text'
+      ) as Array<{ type: 'text'; text: string }>;
+
+      if (textContent.length > 0) {
+        conversationPrompt.push({
+          role: 'assistant',
+          content: textContent,
+        });
+      }
+
       done = true;
     } else {
       throw new Error(`Unexpected finish reason: ${finish?.finishReason}`);
     }
   }
+
+  return conversationPrompt;
 }
 
 async function writeToolOutputToUI(
```
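For readers less familiar with typed generators: the first hunk above changes the second `AsyncGenerator` type parameter, which is the type of the value the generator hands back via `return`, while the first and third parameters (what it yields and what it receives from `next()`) are unchanged. A simplified, self-contained sketch of that shape with stand-in types rather than the real `LanguageModelV2*` types:

```typescript
// Stand-in types; the real iterator uses LanguageModelV2ToolCall,
// LanguageModelV2Prompt, and LanguageModelV2ToolResultPart.
type ToolCall = { toolCallId: string; toolName: string };
type ToolResult = { toolCallId: string; output: unknown };
type PromptMessage = { role: 'user' | 'assistant' | 'tool'; content: unknown };

async function* agentIterator(
  prompt: PromptMessage[]
): AsyncGenerator<ToolCall[], PromptMessage[], ToolResult[]> {
  const conversation = [...prompt]; // mutable copy, as in streamTextIterator

  // Yield pending tool calls; the driver hands tool results back via next().
  const results = yield [{ toolCallId: '1', toolName: 'searchProducts' }];
  conversation.push({ role: 'tool', content: results });

  // On a 'stop' finish the real iterator now appends the assistant's text
  // and returns the accumulated conversation instead of returning nothing.
  conversation.push({ role: 'assistant', content: 'Here are the results.' });
  return conversation;
}
```

The caller then reads that returned conversation from `result.value` once iteration completes, which is exactly what the `durable-agent.ts` change above does before exposing it as `messages`.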
