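// Next.js (App Router) API route, e.g. app/api/chat/route.js (path assumed):
// streams a chat completion from a local Ollama "llama3" model via ModelFusion
// and returns it as a Vercel AI SDK StreamingTextResponse.
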
import { ModelFusionTextStream, asChatMessages } from "@modelfusion/vercel-ai";
import { StreamingTextResponse } from "ai";
import { ollama, streamText } from "modelfusion";

export async function POST(request) {
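  // Parse the chat history sent by the client (e.g. the Vercel AI SDK useChat hook).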
  const { messages } = await request.json();
  console.log(messages);
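
  // Stream a chat completion from the local Ollama "llama3" model.
  // withChatPrompt() lets the model accept the { system, messages } prompt below.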
  const textStream = await streamText({
    model: ollama.ChatTextGenerator({ model: "llama3" }).withChatPrompt(),
    prompt: {
      system:
        "You are an AI chat bot. " +
        "Follow the user's instructions carefully.",

      // asChatMessages converts the Vercel AI SDK messages into ModelFusion chat messages.
      messages: asChatMessages(messages),
    },
  });
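
  // ModelFusionTextStream adapts the ModelFusion stream to a ReadableStream that
  // StreamingTextResponse can send back to the client.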
  return new StreamingTextResponse(
    ModelFusionTextStream(
      textStream,
      // optional callbacks:
      {
        onToken(token) {
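          // Returning early only skips logging llama3's special end-of-turn /
          // end-of-text tokens; it does not remove them from the streamed response.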
          if (token === "<|eot_id|>" || token === "<|end_of_text|>") {
            return;
          }
          console.log(token);
        },
      }
    )
  );
}