State Management: Save and Restore States

Save and Restore AI State

Sometimes conversations with language models can get interesting and you might want to save the state of the conversation so you can revisit it or continue the conversation later.

createAI has an experimental callback function called unstable_onSetAIState that gets called whenever the AI state changes. You can use this to save the AI state to a file or a database.

Client

app/layout.tsx
import { AI, type ServerMessage } from './actions';

/**
 * Root layout: seeds the AI provider with previously saved conversation
 * history so a returning user can continue where they left off.
 */
export default function RootLayout({
  children,
}: Readonly<{
  children: React.ReactNode;
}>) {
  // get chat history from database
  // NOTE(review): getChat is assumed to be defined/imported elsewhere and to
  // return ServerMessage[] synchronously — if it is async, make this
  // component async and await the call.
  const history: ServerMessage[] = getChat();

  return (
    <html lang="en">
      <body>
        <AI initialAIState={history} initialUIState={[]}>
          {children}
        </AI>
      </body>
    </html>
  );
}
app/page.tsx
'use client';

import { useState } from 'react';
import { ClientMessage } from './actions';
import { useActions, useUIState } from 'ai/rsc';
import { generateId } from 'ai';

// Force the page to be dynamic and allow streaming responses up to 30 seconds
export const dynamic = 'force-dynamic';
export const maxDuration = 30;

export default function Home() {
  const [input, setInput] = useState<string>('');
  const [conversation, setConversation] = useUIState();
  const { continueConversation } = useActions();

  // Optimistically append the user's message, then append the reply once the
  // server action resolves.
  const handleSend = async () => {
    setConversation((current: ClientMessage[]) => [
      ...current,
      { id: generateId(), role: 'user', display: input },
    ]);

    const message = await continueConversation(input);

    setConversation((current: ClientMessage[]) => [...current, message]);
  };

  return (
    <div>
      <div>
        {conversation.map((message: ClientMessage) => (
          <div key={message.id}>
            {message.role}: {message.display}
          </div>
        ))}
      </div>
      <div>
        <input
          type="text"
          value={input}
          onChange={event => setInput(event.target.value)}
        />
        <button onClick={handleSend}>Send Message</button>
      </div>
    </div>
  );
}

Server

We will use the callback function to listen to state changes and save the conversation once we receive a done event.

app/actions.tsx
'use server';
import { createAI, getAIState, getMutableAIState, streamUI } from 'ai/rsc';
import { openai } from '@ai-sdk/openai';
import { ReactNode } from 'react';
import { z } from 'zod';
import { generateId } from 'ai';
import { Stock } from '@ai-studio/components/stock';
/**
 * A message as persisted in the AI (server) state.
 */
export interface ServerMessage {
  role: 'user' | 'assistant' | 'function';
  /**
   * Tool name — only present when role is 'function'. Required so the
   * tool `generate` callback can record `{ role: 'function', name, content }`
   * without failing excess-property checking.
   */
  name?: string;
  content: string;
}
/**
 * A message as held in the client UI state. `display` is the ReactNode
 * streamed back from the server action (or rebuilt from saved state).
 */
export interface ClientMessage {
  // Unique key for React list rendering (created with generateId()).
  id: string;
  role: 'user' | 'assistant' | 'function';
  display: ReactNode;
}
/**
 * Server action: sends `input` to the model and streams back the UI for the
 * reply. Commits both the user message and the model's response to the
 * mutable AI state so the conversation can be persisted.
 */
export async function continueConversation(
  input: string,
): Promise<ClientMessage> {
  'use server';

  const history = getMutableAIState();

  const result = await streamUI({
    model: openai('gpt-3.5-turbo'),
    messages: [...history.get(), { role: 'user', content: input }],
    text: ({ content, done }) => {
      // Only commit to the AI state once the full response has streamed.
      if (done) {
        history.done((messages: ServerMessage[]) => [
          ...messages,
          { role: 'user', content: input },
          { role: 'assistant', content },
        ]);
      }

      return <div>{content}</div>;
    },
    tools: {
      showStockInformation: {
        description:
          'Get stock information for symbol for the last numOfMonths months',
        parameters: z.object({
          symbol: z
            .string()
            .describe('The stock symbol to get information for'),
          numOfMonths: z
            .number()
            .describe('The number of months to get historical information for'),
        }),
        generate: async ({ symbol, numOfMonths }) => {
          // Record the user's message here too: the text() branch above is
          // not invoked when the model answers with a tool call, so without
          // this entry the user's input would be missing from saved history.
          history.done((messages: ServerMessage[]) => [
            ...messages,
            { role: 'user', content: input },
            {
              role: 'function',
              name: 'showStockInformation',
              content: JSON.stringify({ symbol, numOfMonths }),
            },
          ]);

          return <Stock symbol={symbol} numOfMonths={numOfMonths} />;
        },
      },
    },
  });

  return {
    id: generateId(),
    role: 'assistant',
    display: result.value,
  };
}
/**
 * The AI provider. Persists the AI state through the experimental
 * unstable_onSetAIState callback and rebuilds renderable UI state from the
 * saved AI state in unstable_onGetUIState.
 */
export const AI = createAI<ServerMessage[], ClientMessage[]>({
  actions: {
    continueConversation,
  },
  unstable_onSetAIState: async ({ state, done }) => {
    'use server';

    // Persist only after the generation has finished streaming.
    // NOTE(review): saveChat is assumed to be defined/imported elsewhere.
    if (done) {
      saveChat(state);
    }
  },
  unstable_onGetUIState: async () => {
    'use server';

    const history: ServerMessage[] = getAIState();

    // Rehydrate each saved message into a client message. Function messages
    // re-render their component from the stored JSON arguments.
    return history.map(message => ({
      id: generateId(),
      role: message.role,
      display:
        message.role === 'function' ? (
          <Stock {...JSON.parse(message.content)} />
        ) : (
          message.content
        ),
    }));
  },
});