diff --git a/docs/content/docs/api-reference/workflow-ai/durable-agent.mdx b/docs/content/docs/api-reference/workflow-ai/durable-agent.mdx index 2fc4812c8..331c835b8 100644 --- a/docs/content/docs/api-reference/workflow-ai/durable-agent.mdx +++ b/docs/content/docs/api-reference/workflow-ai/durable-agent.mdx @@ -29,6 +29,8 @@ async function myAgent() { const agent = new DurableAgent({ model: 'anthropic/claude-haiku-4.5', system: 'You are a helpful weather assistant.', + temperature: 0.7, // Control output randomness + maxOutputTokens: 1000, // Limit response length tools: { getWeather: { description: 'Get weather for a city', @@ -177,6 +179,52 @@ async function multiToolAgentWorkflow(userQuery: string) { } ``` +### Advanced Configuration + +```typescript +import { DurableAgent } from '@workflow/ai/agent'; +import { z } from 'zod'; + +async function calculateResult({ formula }: { formula: string }) { + 'use step'; + // Perform calculation + return "42"; +} + +async function advancedAgentWorkflow(userQuery: string) { + 'use workflow'; + + const agent = new DurableAgent({ + model: 'anthropic/claude-haiku-4.5', + system: 'You are a precise calculator assistant.', + // Model behavior controls + temperature: 0.3, // Lower temperature for more deterministic responses + maxOutputTokens: 500, // Limit response length + topP: 0.9, // Nucleus sampling (prefer setting either temperature or topP, not both) + presencePenalty: 0.2, // Reduce repetition + frequencyPenalty: 0.2, // Reduce word repetition + seed: 12345, // For reproducible results + stopSequences: ['END'], // Stop generation at specific sequences + tools: { + calculateResult: { + description: 'Calculate a mathematical result', + inputSchema: z.object({ formula: z.string() }), + execute: calculateResult, + }, + }, + }); + + await agent.stream({ + messages: [ + { + role: 'user', + content: userQuery, + }, + ], + }); +} +``` + ### Tools with Workflow Library Features ```typescript diff --git a/packages/ai/src/agent/do-stream-step.ts
b/packages/ai/src/agent/do-stream-step.ts index 768910800..43f33a200 100644 --- a/packages/ai/src/agent/do-stream-step.ts +++ b/packages/ai/src/agent/do-stream-step.ts @@ -12,7 +12,17 @@ export async function doStreamStep( conversationPrompt: LanguageModelV2Prompt, modelId: string, writable: WritableStream, - tools?: LanguageModelV2CallOptions['tools'] + tools?: LanguageModelV2CallOptions['tools'], + callOptions?: { + temperature?: number; + maxOutputTokens?: number; + topP?: number; + topK?: number; + presencePenalty?: number; + frequencyPenalty?: number; + stopSequences?: string[]; + seed?: number; + } ) { 'use step'; @@ -20,6 +30,14 @@ export async function doStreamStep( const result = await model.doStream({ prompt: conversationPrompt, tools, + temperature: callOptions?.temperature, + maxOutputTokens: callOptions?.maxOutputTokens, + topP: callOptions?.topP, + topK: callOptions?.topK, + presencePenalty: callOptions?.presencePenalty, + frequencyPenalty: callOptions?.frequencyPenalty, + stopSequences: callOptions?.stopSequences, + seed: callOptions?.seed, }); let finish: FinishPart | undefined; diff --git a/packages/ai/src/agent/durable-agent.test.ts b/packages/ai/src/agent/durable-agent.test.ts new file mode 100644 index 000000000..341d9839a --- /dev/null +++ b/packages/ai/src/agent/durable-agent.test.ts @@ -0,0 +1,193 @@ +/** + * Tests for DurableAgent + * + * These tests verify that the DurableAgent constructor properly accepts + * and stores configuration options from the AI SDK Agent class. 
+ */ +import { describe, expect, it } from 'vitest'; +import { DurableAgent } from './durable-agent.js'; + +describe('DurableAgent', () => { + describe('constructor', () => { + it('should accept basic required options', () => { + const agent = new DurableAgent({ + model: 'anthropic/claude-opus', + tools: {}, + }); + + expect(agent).toBeDefined(); + expect(agent.model).toBe('anthropic/claude-opus'); + expect(agent.tools).toEqual({}); + }); + + it('should accept system prompt', () => { + const agent = new DurableAgent({ + model: 'anthropic/claude-opus', + tools: {}, + system: 'You are a helpful assistant.', + }); + + expect(agent).toBeDefined(); + expect(agent.system).toBe('You are a helpful assistant.'); + }); + + it('should accept temperature option', () => { + const agent = new DurableAgent({ + model: 'anthropic/claude-opus', + tools: {}, + temperature: 0.7, + }); + + expect(agent).toBeDefined(); + expect(agent.temperature).toBe(0.7); + }); + + it('should accept maxOutputTokens option', () => { + const agent = new DurableAgent({ + model: 'anthropic/claude-opus', + tools: {}, + maxOutputTokens: 1000, + }); + + expect(agent).toBeDefined(); + expect(agent.maxOutputTokens).toBe(1000); + }); + + it('should accept topP option', () => { + const agent = new DurableAgent({ + model: 'anthropic/claude-opus', + tools: {}, + topP: 0.9, + }); + + expect(agent).toBeDefined(); + expect(agent.topP).toBe(0.9); + }); + + it('should accept topK option', () => { + const agent = new DurableAgent({ + model: 'anthropic/claude-opus', + tools: {}, + topK: 40, + }); + + expect(agent).toBeDefined(); + expect(agent.topK).toBe(40); + }); + + it('should accept presencePenalty option', () => { + const agent = new DurableAgent({ + model: 'anthropic/claude-opus', + tools: {}, + presencePenalty: 0.5, + }); + + expect(agent).toBeDefined(); + expect(agent.presencePenalty).toBe(0.5); + }); + + it('should accept frequencyPenalty option', () => { + const agent = new DurableAgent({ + model: 
'anthropic/claude-opus', + tools: {}, + frequencyPenalty: 0.5, + }); + + expect(agent).toBeDefined(); + expect(agent.frequencyPenalty).toBe(0.5); + }); + + it('should accept stopSequences option', () => { + const agent = new DurableAgent({ + model: 'anthropic/claude-opus', + tools: {}, + stopSequences: ['STOP', 'END'], + }); + + expect(agent).toBeDefined(); + expect(agent.stopSequences).toEqual(['STOP', 'END']); + }); + + it('should accept seed option', () => { + const agent = new DurableAgent({ + model: 'anthropic/claude-opus', + tools: {}, + seed: 12345, + }); + + expect(agent).toBeDefined(); + expect(agent.seed).toBe(12345); + }); + + it('should accept all options together', () => { + const agent = new DurableAgent({ + model: 'anthropic/claude-opus', + tools: {}, + system: 'You are a helpful assistant.', + temperature: 0.7, + maxOutputTokens: 1000, + topP: 0.9, + topK: 40, + presencePenalty: 0.5, + frequencyPenalty: 0.3, + stopSequences: ['STOP', 'END'], + seed: 12345, + }); + + expect(agent).toBeDefined(); + expect(agent.model).toBe('anthropic/claude-opus'); + expect(agent.system).toBe('You are a helpful assistant.'); + expect(agent.temperature).toBe(0.7); + expect(agent.maxOutputTokens).toBe(1000); + expect(agent.topP).toBe(0.9); + expect(agent.topK).toBe(40); + expect(agent.presencePenalty).toBe(0.5); + expect(agent.frequencyPenalty).toBe(0.3); + expect(agent.stopSequences).toEqual(['STOP', 'END']); + expect(agent.seed).toBe(12345); + }); + + it('should accept tools with proper structure', () => { + const tools = { + testTool: { + description: 'A test tool', + inputSchema: { + type: 'object', + properties: {}, + }, + execute: async () => 'result', + }, + }; + + const agent = new DurableAgent({ + model: 'anthropic/claude-opus', + tools, + }); + + expect(agent).toBeDefined(); + expect(agent.tools).toBe(tools); + }); + }); + + describe('methods', () => { + it('should have generate method', () => { + const agent = new DurableAgent({ + model: 
'anthropic/claude-opus', + tools: {}, + }); + + expect(agent.generate).toBeDefined(); + expect(typeof agent.generate).toBe('function'); + }); + + it('should have stream method', () => { + const agent = new DurableAgent({ + model: 'anthropic/claude-opus', + tools: {}, + }); + + expect(agent.stream).toBeDefined(); + expect(typeof agent.stream).toBe('function'); + }); + }); +}); diff --git a/packages/ai/src/agent/durable-agent.ts b/packages/ai/src/agent/durable-agent.ts index 7c938dc59..87b2ac8a0 100644 --- a/packages/ai/src/agent/durable-agent.ts +++ b/packages/ai/src/agent/durable-agent.ts @@ -33,6 +33,67 @@ export interface DurableAgentOptions { * Optional system prompt to guide the agent's behavior. */ system?: string; + + /** + * Temperature setting. The range depends on the provider and model. + * + * It is recommended to set either `temperature` or `topP`, but not both. + */ + temperature?: number; + + /** + * Maximum number of tokens to generate. + */ + maxOutputTokens?: number; + + /** + * Nucleus sampling. This is a number between 0 and 1. + * + * E.g. 0.1 would mean that only tokens with the top 10% probability mass + * are considered. + * + * It is recommended to set either `temperature` or `topP`, but not both. + */ + topP?: number; + + /** + * Only sample from the top K options for each subsequent token. + * + * Used to remove "long tail" low probability responses. + * Recommended for advanced use cases only. You usually only need to use temperature. + */ + topK?: number; + + /** + * Presence penalty setting. It affects the likelihood of the model to + * repeat information that is already in the prompt. + * + * The presence penalty is a number between -1 (increase repetition) + * and 1 (maximum penalty, decrease repetition). 0 means no penalty. + */ + presencePenalty?: number; + + /** + * Frequency penalty setting. It affects the likelihood of the model + * to repeatedly use the same words or phrases. 
+ * + * The frequency penalty is a number between -1 (increase repetition) + * and 1 (maximum penalty, decrease repetition). 0 means no penalty. + */ + frequencyPenalty?: number; + + /** + * Stop sequences. + * If set, the model will stop generating text when one of the stop sequences is generated. + * Providers may have limits on the number of stop sequences. + */ + stopSequences?: string[]; + + /** + * The seed (integer) to use for random sampling. If set and supported + * by the model, calls will generate deterministic results. + */ + seed?: number; } /** @@ -89,14 +150,30 @@ export interface DurableAgentStreamOptions { * ``` */ export class DurableAgent { - private model: string; - private tools: ToolSet; - private system?: string; + model: string; + tools: ToolSet; + system?: string; + temperature?: number; + maxOutputTokens?: number; + topP?: number; + topK?: number; + presencePenalty?: number; + frequencyPenalty?: number; + stopSequences?: string[]; + seed?: number; constructor(options: DurableAgentOptions) { this.model = options.model; this.tools = options.tools; this.system = options.system; + this.temperature = options.temperature; + this.maxOutputTokens = options.maxOutputTokens; + this.topP = options.topP; + this.topK = options.topK; + this.presencePenalty = options.presencePenalty; + this.frequencyPenalty = options.frequencyPenalty; + this.stopSequences = options.stopSequences; + this.seed = options.seed; } generate() { @@ -124,6 +201,14 @@ export class DurableAgent { tools: this.tools, writable, prompt: modelPrompt, + temperature: this.temperature, + maxOutputTokens: this.maxOutputTokens, + topP: this.topP, + topK: this.topK, + presencePenalty: this.presencePenalty, + frequencyPenalty: this.frequencyPenalty, + stopSequences: this.stopSequences, + seed: this.seed, }); let result = await iterator.next(); diff --git a/packages/ai/src/agent/stream-text-iterator.ts b/packages/ai/src/agent/stream-text-iterator.ts index 1b4ef5ea8..e2d83a742 100644 --- 
a/packages/ai/src/agent/stream-text-iterator.ts +++ b/packages/ai/src/agent/stream-text-iterator.ts @@ -13,11 +13,27 @@ export async function* streamTextIterator({ tools = {}, writable, model, + temperature, + maxOutputTokens, + topP, + topK, + presencePenalty, + frequencyPenalty, + stopSequences, + seed, }: { prompt: LanguageModelV2Prompt; tools: ToolSet; writable: WritableStream; model: string; + temperature?: number; + maxOutputTokens?: number; + topP?: number; + topK?: number; + presencePenalty?: number; + frequencyPenalty?: number; + stopSequences?: string[]; + seed?: number; }): AsyncGenerator< LanguageModelV2ToolCall[], void, @@ -31,7 +47,17 @@ export async function* streamTextIterator({ conversationPrompt, model, writable, - toolsToModelTools(tools) + toolsToModelTools(tools), + { + temperature, + maxOutputTokens, + topP, + topK, + presencePenalty, + frequencyPenalty, + stopSequences, + seed, + } ); if (finish?.finishReason === 'tool-calls') {