Streaming Generative User Interfaces

Stream Updates to Visual Interfaces

In the previous example, we streamed React components from the server to the client. Because the components are streamed, we can also update them in response to state changes that occur on the server.
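
Conceptually, a streamed component is a handle that the server can keep writing to after the initial render. Before the full example, here is a minimal sketch of that idea using the createStreamableUI primitive from ai/rsc; the deployStatus action and its timings are hypothetical stand-ins for real server-side state changes.

'use server';

import { createStreamableUI } from 'ai/rsc';

// Minimal sketch: the server returns a streamable UI slot and keeps
// updating it as (hypothetical) deployment state changes on the server.
export async function deployStatus() {
  const status = createStreamableUI(<div>Starting deployment...</div>);

  // Update the already-streamed component without a new client request.
  (async () => {
    await new Promise(resolve => setTimeout(resolve, 1000));
    status.update(<div>Building...</div>);

    await new Promise(resolve => setTimeout(resolve, 1000));
    status.done(<div>Deployed!</div>);
  })();

  return status.value;
}

The full example below achieves the same effect with streamUI, where a generator-based tool yields intermediate components before returning the final one.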

Client

'use client';

import { useState } from 'react';
import { ClientMessage } from './actions';
import { useActions, useUIState } from 'ai/rsc';
import { generateId } from 'ai';

// Allow streaming responses up to 30 seconds
export const maxDuration = 30;

export default function Home() {
  const [input, setInput] = useState<string>('');
  const [conversation, setConversation] = useUIState();
  const { continueConversation } = useActions();

  return (
    <div>
      <div>
        {conversation.map((message: ClientMessage) => (
          <div key={message.id}>
            {message.role}: {message.display}
          </div>
        ))}
      </div>
      <div>
        <input
          type="text"
          value={input}
          onChange={event => {
            setInput(event.target.value);
          }}
        />
        <button
          onClick={async () => {
            setConversation((currentConversation: ClientMessage[]) => [
              ...currentConversation,
              { id: generateId(), role: 'user', display: input },
            ]);

            const message = await continueConversation(input);

            setConversation((currentConversation: ClientMessage[]) => [
              ...currentConversation,
              message,
            ]);
          }}
        >
          Send Message
        </button>
      </div>
    </div>
  );
}

Server

'use server';

import { getMutableAIState, streamUI } from 'ai/rsc';
import { openai } from '@ai-sdk/openai';
import { ReactNode } from 'react';
import { z } from 'zod';
import { generateId } from 'ai';

export interface ServerMessage {
  role: 'user' | 'assistant';
  content: string;
}

export interface ClientMessage {
  id: string;
  role: 'user' | 'assistant';
  display: ReactNode;
}

export async function continueConversation(
  input: string,
): Promise<ClientMessage> {
  'use server';

  const history = getMutableAIState();

  const result = await streamUI({
    model: openai('gpt-3.5-turbo'),
    messages: [...history.get(), { role: 'user', content: input }],
    text: ({ content, done }) => {
      if (done) {
        history.done((messages: ServerMessage[]) => [
          ...messages,
          { role: 'assistant', content },
        ]);
      }

      return <div>{content}</div>;
    },
    tools: {
      deploy: {
        description: 'Deploy repository to vercel',
        parameters: z.object({
          repositoryName: z
            .string()
            .describe('The name of the repository, example: vercel/ai-chatbot'),
        }),
        generate: async function* ({ repositoryName }) {
          yield <div>Cloning repository {repositoryName}...</div>;
          await new Promise(resolve => setTimeout(resolve, 3000));
          yield <div>Building repository {repositoryName}...</div>;
          await new Promise(resolve => setTimeout(resolve, 2000));
          return <div>{repositoryName} deployed!</div>;
        },
      },
    },
  });

  return {
    id: generateId(),
    role: 'assistant',
    display: result.value,
  };
}

The AI context that exposes this action, along with the initial AI and UI state, is created with createAI in a separate file:

import { createAI } from 'ai/rsc';
import { ServerMessage, ClientMessage, continueConversation } from './actions';

export const AI = createAI<ServerMessage[], ClientMessage[]>({
  actions: {
    continueConversation,
  },
  initialAIState: [],
  initialUIState: [],
});
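
For useUIState and useActions on the client to resolve this context, the application is typically wrapped with the exported AI provider. Below is a minimal sketch of a Next.js root layout, assuming the provider above lives in a file importable as ./ai:

import { ReactNode } from 'react';
import { AI } from './ai'; // assumed path to the createAI export above

// Wrap the app so client components can access the AI and UI state.
export default function RootLayout({ children }: { children: ReactNode }) {
  return (
    <html lang="en">
      <body>
        <AI>{children}</AI>
      </body>
    </html>
  );
}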