Untitled
unknown
plain_text
2 years ago
880 B
10
Indexable
import { ModelFusionTextStream, asChatMessages } from "@modelfusion/vercel-ai";
import { StreamingTextResponse } from "ai";
import { ollama, streamText } from "modelfusion";
/**
 * POST /api/chat — streams a chat completion from a local Ollama llama3 model.
 *
 * Expects a JSON body of the shape `{ messages: [...] }` in the Vercel AI SDK
 * chat-message format (converted via `asChatMessages`).
 *
 * @param {Request} request - Incoming HTTP request with a JSON chat payload.
 * @returns {Promise<StreamingTextResponse>} A streaming response the Vercel AI
 *   SDK client (`useChat`) can consume token-by-token.
 */
export async function POST(request) {
  const { messages } = await request.json();

  const textStream = await streamText({
    model: ollama.ChatTextGenerator({ model: "llama3" }).withChatPrompt(),
    prompt: {
      system:
        "You are an AI chat bot. " +
        "Follow the user's instructions carefully.",
      // Convert Vercel AI SDK messages into ModelFusion chat messages.
      messages: asChatMessages(messages),
    },
  });

  return new StreamingTextResponse(
    ModelFusionTextStream(
      textStream,
      // Optional observational callbacks. NOTE(review): onToken only observes
      // the stream — skipping llama3 end-of-sequence markers here suppresses
      // them from the log, but does NOT remove them from the streamed
      // response. If they appear client-side, filter in the stream itself —
      // TODO confirm against ModelFusionTextStream docs.
      {
        onToken(token) {
          if (token === "<|eot_id|>" || token === "<|end_of_text|>") {
            return;
          }
          console.log(token);
        },
      }
    )
  );
}
Editor is loading...
Leave a Comment