Untitled
unknown
plain_text
a year ago
880 B
7
Indexable
import { ModelFusionTextStream, asChatMessages } from "@modelfusion/vercel-ai";
import { StreamingTextResponse } from "ai";
import { ollama, streamText } from "modelfusion";

// Llama 3 control tokens that the raw ollama token stream may contain.
// We skip these when logging so the server log only shows real text.
const LLAMA3_STOP_TOKENS = new Set(["<|eot_id|>", "<|end_of_text|>"]);

/**
 * POST route handler: streams a chat completion from a local Ollama
 * "llama3" model back to the client using the Vercel AI SDK streaming
 * response protocol.
 *
 * @param {Request} request - Incoming request whose JSON body must contain
 *   a `messages` array in the Vercel AI SDK chat-message shape
 *   (converted for ModelFusion via `asChatMessages`).
 * @returns {Promise<Response>} A `StreamingTextResponse` piping model
 *   tokens to the client, or a 400 JSON error when the body is malformed.
 */
export async function POST(request) {
  // Parse and validate the body up front so a bad request yields a clear
  // 400 instead of an unhandled exception (opaque 500) from deeper layers.
  let messages;
  try {
    ({ messages } = await request.json());
    if (!Array.isArray(messages)) {
      throw new Error("`messages` must be an array");
    }
  } catch {
    return new Response(
      JSON.stringify({ error: "Request body must be JSON with a `messages` array" }),
      { status: 400, headers: { "Content-Type": "application/json" } }
    );
  }

  const textStream = await streamText({
    model: ollama.ChatTextGenerator({ model: "llama3" }).withChatPrompt(),
    prompt: {
      system:
        "You are an AI chat bot. " +
        "Follow the user's instructions carefully.",
      messages: asChatMessages(messages),
    },
  });

  return new StreamingTextResponse(
    ModelFusionTextStream(textStream, {
      // NOTE(review): these callbacks only observe the stream — they do NOT
      // filter the sentinel tokens out of what the client receives. Confirm
      // whether ollama actually forwards them to the client.
      callbacks: {
        onToken(token) {
          if (LLAMA3_STOP_TOKENS.has(token)) {
            return; // skip Llama 3 control tokens when logging
          }
          console.log(token);
        },
      },
    })
  );
}
Editor is loading...
Leave a Comment