Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 6 additions & 15 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,21 +16,12 @@ bun dev

Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.

You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.
## Deploy on Cloudflare

This project uses [`next/font`](https://nextjs.org/docs/app/building-your-application/optimizing/fonts) to automatically optimize and load [Geist](https://vercel.com/font), a new font family for Vercel.
When you're ready to deploy to production, you can follow these steps: https://docs.layercode.com/how-tos/deploy-nextjs-to-cloudflare

## Learn More
To ensure user conversation history is stored correctly, you must configure a Cloudflare KV store. Run the following command, and ensure your wrangler.jsonc is updated with the correct KV binding info.

To learn more about Next.js, take a look at the following resources:

- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.

You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js) - your feedback and contributions are welcome!

## Deploy on Vercel

The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.

Check out our [Next.js deployment documentation](https://nextjs.org/docs/app/building-your-application/deploying) for more details.
```bash
npx wrangler kv namespace create MESSAGES_KV
```
29 changes: 14 additions & 15 deletions app/api/agent/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import { streamText, ModelMessage, tool, stepCountIs } from 'ai';
import z from 'zod';
import { streamResponse, verifySignature } from '@layercode/node-server-sdk';
import { prettyPrintMsgs } from '@/app/utils/msgs';
import { getConversationStore } from '@/app/utils/conversationStorage';
import config from '@/layercode.config.json';

export type MessageWithTurnId = ModelMessage & { turn_id: string };
Expand All @@ -26,10 +27,6 @@ const WELCOME_MESSAGE = config.welcome_message;

const openai = createOpenAI({ apiKey: process.env.OPENAI_API_KEY! });

// In production we recommend fast datastore like Redis or Cloudflare D1 for storing conversation history
// Here we use a simple in-memory object for demo purposes
const conversations = {} as Record<string, MessageWithTurnId[]>;

export const POST = async (request: Request) => {
const requestBody = (await request.json()) as WebhookRequest;
console.log('Webhook received from Layercode', requestBody);
Expand All @@ -46,20 +43,20 @@ export const POST = async (request: Request) => {

const { conversation_id, text: userText, turn_id, type, interruption_context } = requestBody;

// If this is a new conversation, create a new array to hold messages
if (!conversations[conversation_id]) {
conversations[conversation_id] = [];
}
const store = getConversationStore();
const conversation = await store.load(conversation_id);

// Immediately store the user message received
conversations[conversation_id].push({ role: 'user', turn_id, content: userText });
conversation.push({ role: 'user', turn_id, content: userText });
await store.persist(conversation_id, conversation);

switch (type) {
case 'session.start':
// A new session/call has started. If you want to send a welcome message (have the agent speak first), return that here.
return streamResponse(requestBody, async ({ stream }) => {
// Save the welcome message to the conversation history
conversations[conversation_id].push({ role: 'assistant', turn_id, content: WELCOME_MESSAGE });
conversation.push({ role: 'assistant', turn_id, content: WELCOME_MESSAGE });
await store.persist(conversation_id, conversation);
// Send the welcome message to be spoken
stream.tts(WELCOME_MESSAGE);
stream.end();
Expand All @@ -68,7 +65,8 @@ export const POST = async (request: Request) => {
// The user has spoken and the transcript has been received. Call our LLM and generate a response.

// Before generating a response, we store a placeholder assistant msg in the history. This is so that if the agent response is interrupted (which is common for voice agents), before we have the chance to save our agent's response, our conversation history will still follow the correct user-assistant turn order.
const assistantResposneIdx = conversations[conversation_id].push({ role: 'assistant', turn_id, content: '' });
const assistantResposneIdx = conversation.push({ role: 'assistant', turn_id, content: '' });
await store.persist(conversation_id, conversation);
return streamResponse(requestBody, async ({ stream }) => {
const weather = tool({
description: 'Get the weather in a location',
Expand All @@ -89,21 +87,22 @@ export const POST = async (request: Request) => {
const { textStream } = streamText({
model: openai('gpt-4o-mini'),
system: SYSTEM_PROMPT,
messages: conversations[conversation_id], // The user message has already been added to the conversation array earlier, so the LLM will be responding to that.
messages: conversation, // The user message has already been added to the conversation array earlier, so the LLM will be responding to that.
tools: { weather },
toolChoice: 'auto',
stopWhen: stepCountIs(10),
onFinish: async ({ response }) => {
// The assistant has finished generating the full response text. Now we update our conversation history with the additional messages generated. For a simple LLM generated single agent response, there will be one additional message. If you add some tools, and allow multi-step agent mode, there could be multiple additional messages which all need to be added to the conversation history.

// First, we remove the placeholder assistant message we added earlier, as we will be replacing it with the actual generated messages.
conversations[conversation_id].splice(assistantResposneIdx - 1, 1);
conversation.splice(assistantResposneIdx - 1, 1);

// Push the new messages returned from the LLM into the conversation history, adding the Layercode turn_id to each message.
conversations[conversation_id].push(...response.messages.map((m) => ({ ...m, turn_id })));
conversation.push(...response.messages.map((m) => ({ ...m, turn_id })));
await store.persist(conversation_id, conversation);

console.log('--- final message history ---');
prettyPrintMsgs(conversations[conversation_id]);
prettyPrintMsgs(conversation);

stream.end(); // Tell Layercode we are done responding
}
Expand Down
4 changes: 3 additions & 1 deletion app/api/authorize/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,5 +22,7 @@ export const POST = async (request: Request) => {
const text = await response.text();
return NextResponse.json({ error: text || response.statusText }, { status: response.status });
}
return NextResponse.json(await response.json());
const responseData = await response.json();
console.log('Authorize response data:', responseData);
return NextResponse.json(responseData);
};
4 changes: 2 additions & 2 deletions app/ui/VoiceAgent.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,8 @@ export default function VoiceAgent() {
onMuteStateChange(isMuted) {
setMessages((prev) => [...prev, { role: 'data', text: `MIC → ${isMuted ? 'muted' : 'unmuted'}`, ts: Date.now() }]);
},
onConnect: (connectData) => {
setIsPushToTalk(connectData.config?.transcription.trigger === 'push_to_talk');
onConnect: (connectData: { conversationId: string | null; config?: { transcription?: { trigger?: string } } }) => {
setIsPushToTalk(connectData.config?.transcription?.trigger === 'push_to_talk');
},
onMessage: (data: any) => {
console.log(data);
Expand Down
42 changes: 42 additions & 0 deletions app/utils/conversationStorage.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
import { getCloudflareContext } from '@opennextjs/cloudflare';
import { MessageWithTurnId } from '../api/agent/route';

// In-memory fallback store for conversation history, keyed by conversation id.
// Used only when the MESSAGES_KV binding is unavailable (local dev); data resets on restart.
export const inMemoryConversations = {} as Record<string, MessageWithTurnId[]>;

// Builds the KV key under which a conversation's message history is stored.
export const conversationKey = (conversationId: string): string => 'conversation:' + conversationId;

// Fetches the stored message history for a conversation from KV.
// Returns an empty array when no history has been persisted yet.
export const loadConversation = async (kv: KVNamespace, conversationId: string): Promise<MessageWithTurnId[]> => {
  const stored = await kv.get<MessageWithTurnId[]>(conversationKey(conversationId), { type: 'json' });
  if (stored == null) {
    return [];
  }
  return stored;
};

// Writes the full message history for a conversation to KV.
// BUG FIX: the original fired `kv.put` without awaiting or returning it, so callers
// doing `await store.persist(...)` resumed before the write was submitted — and in
// Cloudflare Workers an un-awaited promise can be cancelled when the request ends,
// silently dropping the conversation history. We now await the write.
export const persistConversation = async (kv: KVNamespace, conversationId: string, messages: MessageWithTurnId[]): Promise<void> => {
  console.log(`Persisting conversation ${conversationId} with ${messages.length} messages.`);
  // NOTE(review): this dumps full transcripts (potential PII) into logs — consider removing in production.
  console.log(JSON.stringify(messages, null, 2));
  await kv.put(conversationKey(conversationId), JSON.stringify(messages));
};

// Returns a conversation store with async `load`/`persist` methods, keyed by conversation id.
// Prefers the Cloudflare KV binding MESSAGES_KV; when the binding (or Cloudflare context)
// is unavailable, falls back to the module-level in-memory map so local development works.
// In production (NEXTJS_ENV === 'production') a missing binding is a hard error instead.
export const getConversationStore = () => {
  try {
    const { env } = getCloudflareContext();
    const kv = env.MESSAGES_KV;
    if (!kv) throw new Error('MESSAGES_KV binding is not configured.');
    // Fixed log message: previously said 'MESSAGE_KV', which does not match the binding name.
    console.log('Using MESSAGES_KV for conversation storage.');
    return {
      load: (conversationId: string) => loadConversation(kv, conversationId),
      persist: (conversationId: string, messages: MessageWithTurnId[]) => persistConversation(kv, conversationId, messages)
    };
  } catch (error) {
    // Outside production we degrade gracefully; data lives only for the process lifetime.
    if (process.env.NEXTJS_ENV === 'production') throw error;
    console.warn(
      'MESSAGES_KV binding unavailable in this environment – falling back to in-memory storage. Data will reset on restart. Run `npx wrangler kv namespace create MESSAGES_KV` to create the KV namespace for conversation storage.'
    );
    return {
      load: async (conversationId: string) => inMemoryConversations[conversationId] ?? [],
      persist: async (conversationId: string, messages: MessageWithTurnId[]) => {
        inMemoryConversations[conversationId] = messages;
      }
    };
  }
};
1 change: 1 addition & 0 deletions cloudflare-env.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ declare namespace Cloudflare {
interface Env {
NEXTJS_ENV: string;
ASSETS: Fetcher;
MESSAGES_KV: KVNamespace;
}
}
interface CloudflareEnv extends Cloudflare.Env {}
Expand Down