AI and UI States
In the previous examples, a recurring pattern emerges: one state for the language model on the server, and one state for the UI on the client. Managing these two states separately can get tricky. For example, when the user types something into the input field, we need to update the UI state, but we also need to send the input to the server to update the AI state.
As a result, the ai/rsc library provides a way to seamlessly manage both states together using a context provider that wraps the client application and makes the AI state available to all of its children. This way, client components can access and update the AI state directly, keeping the two states in sync.
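For instance, once the provider is in place, any client component rendered inside it can read both states with the useAIState and useUIState hooks from ai/rsc. The component below is a minimal, hypothetical illustration and is not part of the example that follows:

'use client';

import { useAIState, useUIState } from 'ai/rsc';

// Hypothetical debug component: reads both states managed by the provider.
export function StateSummary() {
  const [aiState] = useAIState(); // server-side message history
  const [uiState] = useUIState(); // client-side rendered messages

  return (
    <pre>
      {aiState.length} AI messages, {uiState.length} UI messages
    </pre>
  );
}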
Client
Let's use the root layout to wrap the children of the page with the AI context provider.
import { ReactNode } from 'react';
import { AI } from './ai';

export default function RootLayout({
  children,
}: Readonly<{ children: ReactNode }>) {
  return (
    <html lang="en">
      <body>
        <AI>{children}</AI>
      </body>
    </html>
  );
}
'use client';
import { useState } from 'react';
import { ClientMessage } from './actions';
import { useActions, useUIState } from 'ai/rsc';
import { generateId } from 'ai';

// Allow streaming responses up to 30 seconds
export const maxDuration = 30;

export default function Home() {
  const [input, setInput] = useState<string>('');
  const [conversation, setConversation] = useUIState();
  const { continueConversation } = useActions();

  return (
    <div>
      <div>
        {conversation.map((message: ClientMessage) => (
          <div key={message.id}>
            {message.role}: {message.display}
          </div>
        ))}
      </div>

      <div>
        <input
          type="text"
          value={input}
          onChange={event => {
            setInput(event.target.value);
          }}
        />
        <button
          onClick={async () => {
            setConversation((currentConversation: ClientMessage[]) => [
              ...currentConversation,
              { id: generateId(), role: 'user', display: input },
            ]);

            const message = await continueConversation(input);

            setConversation((currentConversation: ClientMessage[]) => [
              ...currentConversation,
              message,
            ]);
          }}
        >
          Send Message
        </button>
      </div>
    </div>
  );
}
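Note that the client accesses continueConversation through useActions rather than importing it from the server file directly; the hook returns the actions registered with createAI in the provider shown at the end of this page, so the action stays connected to the same AI context.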
Server
'use server';
import { getMutableAIState, streamUI } from 'ai/rsc';
import { openai } from '@ai-sdk/openai';
import { ReactNode } from 'react';
import { z } from 'zod';
import { generateId } from 'ai';
import { Stock } from '@ai-studio/components/stock';

export interface ServerMessage {
  role: 'user' | 'assistant';
  content: string;
}

export interface ClientMessage {
  id: string;
  role: 'user' | 'assistant';
  display: ReactNode;
}

export async function continueConversation(
  input: string,
): Promise<ClientMessage> {
  'use server';

  const history = getMutableAIState();

  const result = await streamUI({
    model: openai('gpt-3.5-turbo'),
    messages: [...history.get(), { role: 'user', content: input }],
    text: ({ content, done }) => {
      if (done) {
        history.done((messages: ServerMessage[]) => [
          ...messages,
          { role: 'assistant', content },
        ]);
      }

      return <div>{content}</div>;
    },
    tools: {
      showStockInformation: {
        description:
          'Get stock information for symbol for the last numOfMonths months',
        parameters: z.object({
          symbol: z
            .string()
            .describe('The stock symbol to get information for'),
          numOfMonths: z
            .number()
            .describe('The number of months to get historical information for'),
        }),
        generate: async ({ symbol, numOfMonths }) => {
          history.done((messages: ServerMessage[]) => [
            ...messages,
            {
              role: 'assistant',
              content: `Showing stock information for ${symbol}`,
            },
          ]);

          return <Stock symbol={symbol} numOfMonths={numOfMonths} />;
        },
      },
    },
  });

  return {
    id: generateId(),
    role: 'assistant',
    display: result.value,
  };
}
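The Stock component imported from @ai-studio/components/stock is not shown in this example; a minimal, hypothetical placeholder to make the snippet runnable could look like this:

// Hypothetical placeholder for the Stock component used above.
// A real implementation would fetch and render historical price data.
export function Stock({
  symbol,
  numOfMonths,
}: {
  symbol: string;
  numOfMonths: number;
}) {
  return (
    <div>
      Stock data for {symbol} over the last {numOfMonths} months
    </div>
  );
}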
Finally, create the AI context provider.
import { createAI } from 'ai/rsc';
import {
  ServerMessage,
  ClientMessage,
  continueConversation,
} from './actions';

export const AI = createAI<ServerMessage[], ClientMessage[]>({
  actions: {
    continueConversation,
  },
  initialAIState: [],
  initialUIState: [],
});