Untitled

mail@pastecode.io avatar
unknown
typescript
6 months ago
36 kB
3
Indexable
Never
"use node";
import OpenAI from "openai";
import {
    getUrgencyPrompt,
    getEvaluateStayPrompt,
    getGenerateCampaignNamePrompt,
    getGenerateMessagePrompt,
    getConstructMarketingMessagePrompt,
    getSandboxPrompt,
    getGenerateHostReplyPrompt,
    getWithinInitialInfo,
    getAvailabilityRequestPrompt,
    getIsAvailabilityRequestPrompt,
    getIsCheckInCheckOutRequestPrompt,
    getSingleAvailabilityRequestPrompt,
    getIsInCustomUrgencyPrompt,
    getDLExtract,
    getQAVerdictPrompt,
    getDLFilter,
} from "../../utils/prompts";
import { getCurrentDateString } from "../utils/pmsCalendar";
import { Doc } from "../../_generated/dataModel";
import { MessageItem } from "../../types";
import { truncateTokens } from "../utils/openai";

// Shared per-request options for every chat-completion call in this module:
// retry transient failures up to 3 times and abort a request after 20 s.
const completionOptions = {
    maxRetries: 3,
    timeout: 20000,
};

/**
 * Thin wrapper around the OpenAI chat-completions API that centralizes every
 * LLM call made on behalf of a host: urgency triage, reply generation,
 * availability-request extraction, QA, and marketing copy.
 */
export default class Agent {
    // OpenAI client; routed through the Helicone proxy in production.
    public openai: OpenAI;

    /**
     * @param userId Forwarded to Helicone as the per-user attribution id
     *   (production only).
     */
    constructor(userId: string) {
        const apiKey = process.env.OPENAI_API_KEY;
        if (!apiKey) {
            // Deliberately non-fatal: later requests will fail with an auth
            // error instead of crashing at construction time.
            console.error("OPENAI_API_KEY not set");
        }
        // In production, send traffic through Helicone for observability;
        // otherwise talk to the OpenAI API directly.
        const configuration =
            process.env.ENVIRONMENT === "production"
                ? new OpenAI({
                      apiKey,
                      baseURL: "https://oai.hconeai.com/v1",
                      defaultHeaders: {
                          "Helicone-Auth": `Bearer ${process.env.HELICONE_API_KEY}`,
                          "Helicone-User-Id": userId,
                      },
                  })
                : new OpenAI({
                      apiKey,
                  });
        this.openai = configuration;
    }

    /**
     * Checks whether the guest message falls under at least one of the
     * host's custom "urgent" topics. Returns false when the host has no
     * custom topics configured; fails open (true) when the API yields no
     * response.
     */
    async determineIfInCustomUrgencyList({
        currentMessage,
        hostProfile,
    }: {
        currentMessage: string;
        hostProfile: Doc<"hostProfiles"> | null;
    }): Promise<boolean> {
        const urgencyString = getCustomUrgencyString(hostProfile);
        if (!urgencyString.trim()) return false;

        const prompt = getIsInCustomUrgencyPrompt({
            message: currentMessage,
            customUrgency: urgencyString,
        });

        const response = await this.openai.chat.completions.create(
            {
                model: "gpt-3.5-turbo",
                temperature: 0.1,
                messages: [
                    {
                        role: "user",
                        content: prompt,
                    },
                ],
                tools: [
                    {
                        type: "function",
                        function: {
                            name: "isInAtLeastOneTopic",
                            description:
                                "Determine whether the guest message falls into at least one of the topics provided",
                            parameters: {
                                type: "object",
                                properties: {
                                    isUrgent: {
                                        type: "boolean",
                                        description:
                                            "Whether the message falls into at least one of the topics provided",
                                    },
                                },
                            },
                        },
                    },
                ],
            },
            { ...completionOptions }
        );

        if (!response) return true;

        // The tool schema declares the boolean under the key "isUrgent", so
        // read that key. (The previous code destructured
        // "isInAtLeastOneTopic" — the *function* name — and always got
        // undefined, making this method silently return a non-boolean.)
        const { isUrgent } = parseResponseToFunctionArgs(response);
        return isUrgent === true;
    }

    /**
     * Classifies the current guest message as "Urgent" or "Not Urgent",
     * taking recent conversation context into account. Fails open (Urgent)
     * when the API yields no response.
     */
    async determineUrgency({
        currentMessage,
        previousMessages,
        hostProfile,
    }: {
        currentMessage: string;
        previousMessages: { text: string; author: string }[];
        hostProfile: Doc<"hostProfiles"> | null;
    }): Promise<"Urgent" | "Not Urgent"> {
        // Drop very long messages (>=250 words) so the prompt stays compact.
        const filteredPreviousMessages = previousMessages.filter(
            (message) => message.text.split(" ").length < 250
        );

        const fullIntentInstructions = getUrgencyPrompt({
            customUrgency: getCustomUrgencyString(hostProfile),
            previousMessages: getMessageString(filteredPreviousMessages, 8),
            currentMessage: "guest: " + currentMessage,
        });

        // Tiny max_tokens: we only need the verdict string back.
        const model = "gpt-3.5-turbo-1106";
        const max_tokens = 5;
        const temperature = 0;

        const response = await this.openai.chat.completions.create(
            {
                model,
                max_tokens,
                temperature,
                messages: [
                    {
                        role: "user",
                        content: fullIntentInstructions,
                    },
                ],
            },
            { ...completionOptions }
        );

        if (!response) {
            return "Urgent";
        }
        const resBody = response.choices[0].message.content || "";
        console.log("urgency response:", resBody);
        const intentResponseObj = parseIntentResponse(resBody);
        return intentResponseObj;
    }

    /**
     * Asks the model whether the guest's question is answerable from the
     * listing's stored info (additional info, FAQ, policies, learnings).
     * Returns true when the model replies "yes".
     */
    async determineIfWithinInitialInfo({
        listingInformation,
        previousMessages,
        message,
    }: {
        listingInformation: Doc<"listings">;
        previousMessages: { text: string; author: string }[];
        message: string;
    }): Promise<boolean> {
        const filteredPreviousMessages = previousMessages.filter(
            (message) => message.text.split(" ").length < 250
        );
        // NOTE(review): assumes formData[5] holds the "additional info"
        // field — confirm against the listing form schema.
        const additionalInfo = listingInformation.formData[5].value;
        const faq = getListingFaqString(listingInformation);
        const learnings = getSavedLearningsString(listingInformation);
        const prompt = getWithinInitialInfo({
            additionalInfo: `${additionalInfo}\n${learnings}`,
            faq,
            message,
            policies: getPoliciesString(listingInformation),
            previousMessages: getMessageString(filteredPreviousMessages, 8),
        });

        // One token is enough for a yes/no answer.
        const model = "gpt-3.5-turbo-1106";
        const max_tokens = 1;
        const temperature = 0;

        const response = await this.openai.chat.completions.create(
            {
                model,
                max_tokens,
                temperature,
                messages: [
                    {
                        role: "user",
                        content: prompt,
                    },
                ],
            },
            { ...completionOptions }
        );

        const resBody = response.choices[0].message.content || "";
        return resBody.toLowerCase().includes("yes");
    }

    /**
     * Generates a demo ("sandbox") host reply for the given conversation.
     * Bracketed placeholders (e.g. "[Guest Name]") are stripped from the
     * output.
     */
    async generateSandboxMessage({
        messages,
        availabilityResponses,
        listingInformation,
        hostProfile,
    }: {
        messages: { text: string; author: string }[];
        availabilityResponses: string;
        listingInformation: Doc<"listings">;
        hostProfile: Doc<"hostProfiles"> | null;
    }): Promise<string> {
        const prompt = getSandboxPrompt({
            host: {
                name: hostProfile?.preferredName || "",
                bio: hostProfile?.bio || "",
            },
            listing: {
                name: listingInformation.name,
                address: listingInformation.address,
                info: getListingPrompt(listingInformation),
                price: listingInformation.price,
                policies: getPoliciesString(listingInformation),
                learnings: getSavedLearningsString(listingInformation),
            },
            date: getCurrentDateString(),
            availabilityResponse: availabilityResponses,
        });
        const response = await this.openai.chat.completions.create(
            {
                model: "gpt-3.5-turbo-1106",
                temperature: 0.2,
                messages: [
                    {
                        role: "system",
                        content: prompt,
                    },
                    ...getSquishedMessages(messages as MessageItem[]),
                ],
            },
            { ...completionOptions }
        );
        if (!response) return "OpenAI error";
        const resBody = response.choices[0].message.content || "";
        // Strip "[...]" placeholder fragments the model sometimes emits.
        const regex = /\[[^\]]*\]/g;

        return resBody.replace(regex, "");
    }

    /**
     * Rewrites/expands the host's draft `text` into a full message, using
     * recent conversation context. Quotes are stripped from the output.
     */
    async generateText({
        previousMessages,
        text,
    }: {
        previousMessages: MessageItem[];
        text: string;
    }): Promise<string> {
        // NOTE(review): slice(-1) passes only the *last* message even though
        // lookbackWindow suggests up to 5 were intended — confirm the
        // intended getMessageString contract before changing.
        const lookbackWindow = previousMessages.length > 5 ? 5 : 0;
        const messageString = getMessageString(
            previousMessages.slice(-1),
            lookbackWindow
        );
        const fullPrompt: string = getGenerateMessagePrompt({
            messages: messageString,
            text,
        });
        const response = await this.openai.chat.completions.create(
            {
                model: "gpt-3.5-turbo-1106",
                temperature: 0.2,
                messages: [
                    {
                        role: "system",
                        content: fullPrompt,
                    },
                    { role: "user", content: text },
                ],
            },
            { ...completionOptions }
        );
        if (!response) return "OpenAI error";
        const resBody = response.choices[0].message.content || "";
        // remove all quotes
        return resBody.replace(/"/g, "");
    }

    /**
     * Generates the production host reply for a conversation, grounded in
     * the reservation, listing info, and availability lookups. Bracketed
     * placeholders are stripped from the output.
     */
    async generateHostReply({
        messages,
        availabilityResponses,
        listing,
        reservation,
        hostProfile,
    }: {
        messages: MessageItem[];
        availabilityResponses: string;
        listing: Doc<"listings">;
        reservation: Doc<"reservations">;
        hostProfile: Doc<"hostProfiles"> | null;
    }): Promise<string> {
        // need to check "status" field for reservation on other PMS platforms
        const prompt = getGenerateHostReplyPrompt({
            guestName: reservation.guestName || "Guest",
            reservation: {
                arrivalDate: reservation.arrivalDate,
                departureDate: reservation.departureDate,
                isInquiry: reservation.status?.includes("inquiry") || false,
            },
            host: {
                name: hostProfile?.preferredName || "",
                bio: hostProfile?.bio || "",
            },
            listing: {
                name: listing.name,
                type: listing.propertyType || "Rental Property",
                address: listing.address,
                info: getListingPrompt(listing),
                price: listing.price,
                policies: getPoliciesString(listing),
                learnings: getSavedLearningsString(listing),
            },
            availabilityResponse: availabilityResponses,
            currentDate: getCurrentDateString(),
        });

        // Only the last 10 messages are sent to keep the context bounded.
        const windowedMessages = messages.slice(-10);

        const model = "gpt-4-1106-preview";
        const max_tokens = 600;
        const temperature = 0.2;

        const response = await this.openai.chat.completions.create(
            {
                model,
                max_tokens,
                temperature,
                messages: [
                    {
                        role: "system",
                        content: prompt,
                    },
                    ...getSquishedMessages(windowedMessages),
                ],
            },
            { ...completionOptions }
        );

        if (!response) return "";
        const resBody = response.choices[0].message.content || "";
        // Strip "[...]" placeholder fragments the model sometimes emits.
        const regex = /\[[^\]]*\]/g;

        return resBody.replace(regex, "");
    }

    /**
     * QA pass over a drafted reply: returns "Fail" when the verdict text
     * contains "fail" (case-insensitive), otherwise "Quality Assured".
     */
    async generateQAVerdict({
        messages,
        resBody, // TODO: @JP Add more info to bolster QA prompt with all background context.
    }: {
        messages: MessageItem[];
        resBody: string;
    }): Promise<string> {
        const chatContextMessages = getMessageString(messages.slice(-10));
        const unverifiedMessage = resBody;

        const prompt = getQAVerdictPrompt({
            chatContext: chatContextMessages,
            unverifiedMessage: unverifiedMessage,
        });

        const response = await this.openai.chat.completions.create(
            {
                model: "gpt-3.5-turbo-1106",
                temperature: 0.2,
                messages: [
                    {
                        role: "system",
                        content: prompt,
                    },
                    ...getSquishedMessages(messages.slice(-10)),
                ],
            },
            { ...completionOptions }
        );

        if (!response) return "";
        const replyText = response.choices[0].message.content || "";
        const failRegex = /fail/i;

        return failRegex.test(replyText) ? "Fail" : "Quality Assured";
    }

    /**
     * Asks the model for 5 restaurants near the listing address, each with a
     * short blurb and driving time. Output is free text from the model.
     */
    async getNearbyRestaurants(listing: Doc<"listings">): Promise<string> {
        const prompt = `Given an address, please list 5 restaurants, 2 sentences about the restaurant, and their corresponding driving travel time (in minutes) from the address. Number them from 1 to 5.
        Example: 1. John's Pizza: Ranked #1 pizza shop in New York. Great vibes for drinks and music. - 5 mins`;
        const { address, city, country } = listing;
        const response = await this.openai.chat.completions.create(
            {
                model: "gpt-3.5-turbo-1106",
                temperature: 0,
                messages: [
                    {
                        role: "system",
                        content: prompt,
                    },
                    {
                        // Template literals are never falsy, so the previous
                        // `|| ""` fallback was dead code.
                        role: "user",
                        content: `${address}, ${city}, ${country}`,
                    },
                ],
            },
            { ...completionOptions }
        );
        if (!response) return "";
        const resBody = response.choices[0].message.content;
        return resBody || "";
    }

    /**
     * Classifies a property into one of a fixed set of type labels (or
     * "Rental property") from free-text property info.
     */
    async getPropertyType(propertyInfo: string): Promise<string> {
        const model = "gpt-3.5-turbo";
        const max_tokens = 10;
        const prompt = `[no prose][output only single string] Given information about a rental property, identify the type of property it is. The possible types are apartment, condo, villa, bungalow, cabin, chalet, loft, guesthouse, hostel, hotel, houseboat, campsite, tent, RV, camper, treehouse, flat, house, home, shared-home, dome, mansion, castle, A-frame, room, bed and breakfast. If, based on the information, the property does not fit into the aforementioned categories, return the string 'Rental property'.`;

        const messages = [
            {
                role: "system",
                content: prompt,
            },
            {
                role: "user",
                content: propertyInfo,
            },
        ];

        // TODO:
        // To apply truncateTokens to all functions feasibly...
        // 1) make a function call and pass as parameters into GPT function
        // 2) wrap the function call and pass parameters into wrapping function
        const updatedMessages = truncateTokens(messages, model, max_tokens);
        const response = await this.openai.chat.completions.create(
            {
                model,
                max_tokens,
                temperature: 0,
                messages: updatedMessages,
            },
            { ...completionOptions }
        );

        if (!response) return "";
        const resBody = response.choices[0]!.message!.content;

        console.log(response);

        return resBody || "";
    }

    /**
     * Asks the model for 5 attractions near the listing address, each with a
     * short blurb and driving time. Output is free text from the model.
     */
    async getNearbyAttractions(listing: Doc<"listings">): Promise<string> {
        const prompt = `Given an address, please list 5 attractions, 2 sentences about the attraction, and their corresponding driving travel (in minutes) from the address. Number them from 1 to 5.
        Example: 1. Golden Gate Bridge: The Golden Gate Bridge is a suspension bridge spanning the Golden Gate, the one-mile-wide strait connecting San Francisco Bay and the Pacific Ocean. - 10 mins`;
        const { address, city, country } = listing;
        const response = await this.openai.chat.completions.create(
            {
                model: "gpt-3.5-turbo-1106",
                temperature: 0,
                messages: [
                    {
                        role: "system",
                        content: prompt,
                    },
                    {
                        // Template literals are never falsy, so the previous
                        // `|| ""` fallback was dead code.
                        role: "user",
                        content: `${address}, ${city}, ${country}`,
                    },
                ],
            },
            { ...completionOptions }
        );
        if (!response) return "";
        const resBody = response.choices[0]!.message!.content;

        return resBody || "";
    }

    /**
     * Two-stage "dynamic learning" pipeline: extract candidate learnings
     * from a Q&A transcript, then filter them. Returns the filtered text,
     * or "" when extraction produced nothing.
     */
    async dynamicLearning({ qna }: { qna: string }): Promise<string> {
        const learningRes = await this.openai.chat.completions.create(
            {
                model: "gpt-3.5-turbo-1106",
                temperature: 0,
                messages: [
                    {
                        role: "user",
                        content: getDLExtract({
                            qna,
                        }),
                    },
                ],
            },
            // Consistency fix: every other call passes retry/timeout options;
            // this first call previously omitted them.
            { ...completionOptions }
        );

        const learnings = learningRes.choices[0]!.message!.content;

        if (!learnings) return "";

        const filteredRes = await this.openai.chat.completions.create(
            {
                model: "gpt-3.5-turbo-1106",
                temperature: 0,
                messages: [
                    {
                        role: "user",
                        content: getDLFilter({
                            learnings,
                        }),
                    },
                ],
            },
            { ...completionOptions }
        );

        const resBody = filteredRes.choices[0]!.message!.content;
        return resBody || "";
    }

    // TODO: check context window limit
    /**
     * Summarizes useful information about a guest's previous stay from the
     * message history. Expects a JSON body with a "list_info" field; falls
     * back to "No information from messages" on parse failure.
     */
    async evaluatePreviousStay({
        messages,
    }: {
        messages: MessageItem[];
    }): Promise<string> {
        const prompt = getEvaluateStayPrompt({
            messages: getMessageString(messages),
        });

        const response = await this.openai.chat.completions.create(
            {
                model: "gpt-4-1106-preview",
                temperature: 0,
                messages: [
                    {
                        role: "user",
                        content: prompt,
                    },
                ],
            },
            { ...completionOptions }
        );
        if (!response) return "";
        const resBody: string = response.choices[0]!.message!.content || "";

        try {
            const { list_info } = JSON.parse(resBody);

            if (list_info) return list_info;
            else return "No information from messages";
        } catch {
            return "No information from messages";
        }
    }

    /**
     * Generates a short name for a recapture campaign. Quotes are stripped
     * from the output.
     */
    async generateRecaptureCampaignName({
        campaignInfo,
    }: {
        campaignInfo: string;
    }): Promise<string> {
        const prompt = getGenerateCampaignNamePrompt({
            campaignInfo,
        });

        const response = await this.openai.chat.completions.create(
            {
                model: "gpt-3.5-turbo-1106",
                temperature: 0.2,
                messages: [
                    {
                        role: "user",
                        content: prompt,
                    },
                ],
            },
            { ...completionOptions }
        );

        if (!response) return "";
        const resBody: string = response.choices[0]!.message!.content || "";

        // remove all quotes
        return resBody.replace(/"/g, "");
    }

    /**
     * Determines whether the guest message is asking about availability
     * dates. Fails closed (not a request) when the API yields no response.
     */
    async checkIsAvailabilityRequest({
        reservation,
        message,
        previousMessages,
    }: {
        reservation: Doc<"reservations">;
        message: string;
        previousMessages: MessageItem[];
    }): Promise<{ isRequest: boolean }> {
        const filteredPreviousMessages = previousMessages.filter(
            (message) => message.text.split(" ").length < 250
        );
        const prompt = getIsAvailabilityRequestPrompt({
            message,
            previousMessages: getMessageString(filteredPreviousMessages, 2),
            reservationType: reservation.status
                ?.toLowerCase()
                .includes("inquiry")
                ? "Inquiry (not finalized)"
                : "Reservation (finalized)",
        });

        const response = await this.openai.chat.completions.create(
            {
                model: "gpt-3.5-turbo",
                temperature: 0.1,
                messages: [
                    {
                        role: "user",
                        content: prompt,
                    },
                ],
                tools: [
                    {
                        type: "function",
                        function: {
                            name: "getIsRequest",
                            description:
                                "Determine whether the guest message is a related to availability dates",
                            parameters: {
                                type: "object",
                                properties: {
                                    isDateRequest: {
                                        type: "boolean",
                                        description:
                                            "Whether the message is asking for availability dates",
                                    },
                                },
                            },
                        },
                    },
                ],
            },
            { ...completionOptions }
        );

        if (!response) return { isRequest: false };

        const { isDateRequest } = parseResponseToFunctionArgs(response);

        // Coerce so a missing/omitted tool argument yields false instead of
        // leaking `undefined` through the boolean return type.
        return { isRequest: isDateRequest === true };
    }

    /**
     * Classifies an availability request as 'earlyCheckin', 'lateCheckout',
     * or 'none'. Returns an empty intent when the API yields no response.
     */
    async getAvailabilityIntent({
        message,
    }: {
        message: string;
    }): Promise<{ intent: string }> {
        const prompt = getIsCheckInCheckOutRequestPrompt({
            message,
        });

        const response = await this.openai.chat.completions.create(
            {
                model: "gpt-3.5-turbo",
                temperature: 0.1,
                messages: [
                    {
                        role: "user",
                        content: prompt,
                    },
                ],
                tools: [
                    {
                        type: "function",
                        function: {
                            name: "getRequestIntent",
                            description:
                                "Get the intent of the availability request",
                            parameters: {
                                type: "object",
                                properties: {
                                    intent: {
                                        type: "string",
                                        description:
                                            "The intent of the availability request. Either 'earlyCheckin', 'lateCheckout', or 'none'",
                                    },
                                },
                            },
                        },
                    },
                ],
            },
            { ...completionOptions }
        );

        if (!response) return { intent: "" };

        const { intent } = parseResponseToFunctionArgs(response);

        // Guard against the model omitting the argument entirely.
        return { intent: intent ?? "" };
    }

    /**
     * Extracts the requested start and end dates (YYYY-MM-DD) from a guest
     * message asking to change reservation dates. Returns empty strings when
     * the API yields no response.
     */
    async getAvailabilityRequestStartAndEndDates({
        message,
        reservation,
        previousMessages,
    }: {
        message: string;
        reservation: Doc<"reservations">;
        previousMessages: MessageItem[];
    }): Promise<{ startDate: string; endDate: string }> {
        const isInquiry = reservation.status?.toLowerCase().includes("inquiry");
        const prompt = getAvailabilityRequestPrompt({
            message,
            reservation: {
                checkInDate: reservation.arrivalDate,
                checkOutDate: reservation.departureDate,
                type: isInquiry
                    ? "Inquiry (not finalized)"
                    : "Reservation (finalized)",
            },
            previousMessages: getMessageString(previousMessages, 8),
            currentDate: getCurrentDateString(),
        });

        const response = await this.openai.chat.completions.create(
            {
                model: "gpt-4-1106-preview",
                temperature: 0.1,
                messages: [
                    {
                        role: "user",
                        content: prompt,
                    },
                ],
                tools: [
                    {
                        type: "function",
                        function: {
                            name: "getDates",
                            description:
                                "Get the start and end dates of the availability request",
                            parameters: {
                                type: "object",
                                properties: {
                                    isRequest: {
                                        type: "boolean",
                                        description:
                                            "Whether the guest message is requesting a change in either the start date or end date or both",
                                    },
                                    startDate: {
                                        type: "string",
                                        description:
                                            "New requested start date in the format YYYY-MM-DD.",
                                    },
                                    endDate: {
                                        type: "string",
                                        // Fixed "desciption" typo: the
                                        // misspelled key meant this field's
                                        // description never reached the model.
                                        description:
                                            "New requested end date in the format YYYY-MM-DD.",
                                    },
                                },
                            },
                        },
                    },
                ],
            },
            { ...completionOptions }
        );

        if (!response) return { startDate: "", endDate: "" };

        // isRequest is declared in the schema but unused by callers.
        const { startDate, endDate } = parseResponseToFunctionArgs(response);
        return { startDate, endDate };
    }

    /**
     * Computes the single date that must be checked for availability for an
     * early check-in or late check-out request.
     */
    async getAvailabilityRequestSingleDate({
        reservation,
        listing,
        requestType,
    }: {
        reservation: Doc<"reservations">;
        listing: Doc<"listings">;
        requestType: "earlyCheckin" | "lateCheckout";
    }): Promise<{ date: string }> {
        const prompt = getSingleAvailabilityRequestPrompt({
            reservation: {
                checkInDate: reservation.arrivalDate,
                checkOutDate: reservation.departureDate,
            },
            currentDate: getCurrentDateString(),
            policy: getPolicyString(listing, requestType),
            requestType:
                requestType == "earlyCheckin"
                    ? "early check-in"
                    : "late check-out",
        });

        const response = await this.openai.chat.completions.create(
            {
                model: "gpt-3.5-turbo",
                temperature: 0.1,
                messages: [
                    {
                        role: "user",
                        content: prompt,
                    },
                ],
                // NOTE(review): uses the legacy `functions` parameter while
                // every other method uses `tools`; confirm
                // parseResponseToFunctionArgs handles both shapes before
                // migrating.
                functions: [
                    {
                        name: "getDate",
                        description:
                            "Get the date of the early check-in or late check-out request",
                        parameters: {
                            type: "object",
                            properties: {
                                date: {
                                    type: "string",
                                    description:
                                        "Date that must be checked for availability in YYYY-MM-DD format",
                                },
                            },
                        },
                    },
                ],
            },
            { ...completionOptions }
        );

        const { date } = parseResponseToFunctionArgs(response);
        return { date };
    }

    /**
     * Writes an outbound recapture marketing message (email or SMS) for a
     * past guest, combining their previous stay with the new listing.
     */
    async generateRecaptureOutbound({
        guestInfo,
        previousListing,
        previousStayEvaluation,
        newListing,
        campaignInfo,
        channel,
    }: {
        guestInfo: string;
        previousListing: Doc<"listings">;
        previousStayEvaluation: string;
        newListing: Doc<"listings">;
        campaignInfo: string;
        channel: "email" | "sms";
    }): Promise<string> {
        const prompt = getConstructMarketingMessagePrompt({
            isEmail: channel === "email",
            guestInfo,
            previousListing: getBasicListingInfo(previousListing),
            currentListing: getListingPrompt(newListing),
            previousStayEvaluation,
            campaignInfo,
            date: getCurrentDateString(),
        });

        const response = await this.openai.chat.completions.create(
            {
                model: "gpt-4-1106-preview",
                temperature: 0,
                messages: [
                    {
                        role: "user",
                        content: prompt,
                    },
                ],
            },
            { ...completionOptions }
        );
        if (!response) return "";
        const resBody: string = response.choices[0].message.content || "";

        return resBody || "";
    }
}

// Render a listing's form fields and FAQ entries as a prompt-ready string.
// The "Personality or Tone of Voice" field is handled elsewhere, so it is
// emitted as a blank entry to keep line parity with the form data.
const getListingPrompt = (listings: any): string => {
    const { formData, faqData } = listings;

    const formLines: string[] = [];
    for (const entry of formData) {
        formLines.push(
            entry.label === "Personality or Tone of Voice"
                ? ``
                : `${entry.label}: ${entry.value || "none"}`
        );
    }

    const faqLines: string[] = [];
    for (const entry of faqData) {
        faqLines.push(`${entry.label}: ${entry.value || "none"}`);
    }

    return `${formLines.join("\n")}\n\nFrequently Asked Questions:\n${faqLines.join("\n")}\n`;
};

// One-line-per-field summary of a listing: name, address, and the first
// form field's value (used as the headline listing description).
const getBasicListingInfo = (listing: Doc<"listings">): string => {
    const summary = listing.formData[0].value;
    return [
        `Name: ${listing.name}`,
        `Address: ${listing.address}`,
        `Listing Information: ${summary}`,
    ].join("\n");
};

// Format a listing's FAQ entries as one Q/A pair per line; unanswered
// questions are rendered as "NO ANSWER".
const getListingFaqString = (listing: Doc<"listings">): string => {
    const lines: string[] = [];
    for (const entry of listing.faqData) {
        lines.push(`Q: "${entry.label}", A: "${entry.value || "NO ANSWER"}"`);
    }
    return lines.join("\n");
};

// Render a listing's saved learnings as a bulleted list, or "" when the
// listing has none.
const getSavedLearningsString = (listing: Doc<"listings">): string => {
    if (!listing.savedLearnings) return "";
    const bullets: string[] = [];
    for (const learning of listing.savedLearnings) {
        bullets.push(`- ${learning}`);
    }
    return bullets.join("\n");
};

// Concatenate the values of every "Personality or Tone of Voice" form field
// on the listing. Returns "" when no listing is provided.
const getPersonalityPrompt = (listings: any): string => {
    if (!listings) return "";
    const pieces: any[] = [];
    for (const entry of listings.formData) {
        if (entry.label === "Personality or Tone of Voice") {
            pieces.push(entry.value);
        }
    }
    return pieces.join("");
};

const parseIntentResponse = (response: string): "Urgent" | "Not Urgent" => {
    // Classify a model response as urgent or not. Fails safe: an empty or
    // unrecognized response is treated as "Urgent".
    if (!response) return "Urgent";
    // Strip tabs, newlines, and braces so a JSON-ish wrapper like
    // {"Not Urgent"} still matches. String.prototype.replace cannot throw
    // here, so the original's try/catch (which logged an always-empty
    // variable) has been removed.
    const urgency = response.replace(/[\t\n\r{}]/g, "");
    return urgency.includes("Not Urgent") ? "Not Urgent" : "Urgent";
};

const getSquishedMessages = (
    messages: MessageItem[]
): { role: "user" | "assistant"; content: string }[] => {
    // Convert the conversation into OpenAI chat roles: host -> "assistant",
    // everyone else -> "user". Runs of consecutive guest messages are merged
    // into a single user message (joined with blank lines) so the model sees
    // one turn per speaker change. CSS fragments are stripped from each
    // resulting message.
    const squished: { role: "user" | "assistant"; content: string }[] = [];
    let pendingUserTexts: string[] = [];

    // Emit any buffered guest texts as one merged user message.
    const flushPendingUser = () => {
        if (pendingUserTexts.length === 0) return;
        squished.push({ role: "user", content: pendingUserTexts.join("\n\n") });
        pendingUserTexts = [];
    };

    for (const { text, author } of messages) {
        if (author == "host") {
            flushPendingUser();
            squished.push({ role: "assistant", content: text });
        } else {
            pendingUserTexts.push(text);
        }
    }
    flushPendingUser();

    return squished.map(({ role, content }) => ({
        role,
        content: removeCSSCode(content),
    }));
};

function removeCSSCode(input: string): string {
    // Strip simple CSS rule blocks (a selector followed by one or more brace
    // groups) from a message, then trim surrounding whitespace. Flat rules
    // only — nested constructs such as media queries may not be fully removed.
    const cssBlockPattern = /[^\r\n,{}]+(\s*{[\s\S]*?}\s*)+/gm;
    const cleaned = input.replace(cssBlockPattern, "");
    return cleaned.trim();
}

const getMessageString = (
    messages: any,
    lookbackWindow: number = 0
): string => {
    // Render the last `lookbackWindow` messages, one per line, as:
    //   author: "text"
    // A window of 0 yields ALL messages (slice(-0) === slice(0)), and a
    // window larger than the history also yields all messages.
    const truncatedMessages = messages.slice(-1 * lookbackWindow);
    return truncatedMessages
        .map(
            ({ text, author }: { text: string; author: string }) =>
                `${author}: "${text}"`
        )
        .join("\n");
};

const getCustomUrgencyString = (hostProfile: Doc<"hostProfiles"> | null) => {
    // Normalize the host's free-text custom urgency rules into a dash bullet
    // list, one bullet per non-empty line. Lines already starting with "-"
    // are kept as-is. Blank lines are dropped — the original turned them into
    // stray "- " bullets, which also defeated the caller's
    // `!urgencyString.trim()` emptiness check.
    if (!hostProfile || !hostProfile.customUrgency) return "";
    const { customUrgency } = hostProfile;
    const bullets = customUrgency
        .split("\n")
        .map((line) => line.trim())
        .filter((line) => line.length > 0)
        .map((line) => (line.startsWith("-") ? line : `- ${line}`));
    return bullets.join("\n");
};

const getPoliciesString = (listing: Doc<"listings">): string => {
    // Render every listing policy on its own line. Unchecked policies are
    // reported as "Not Allowed"; checked ones show the host-provided detail
    // text verbatim (which may be empty). Returns "" when the listing has no
    // policies at all.
    const { policies } = listing;
    if (!policies) return "";
    return policies
        .map(
            ({ name, isChecked, value }: any) =>
                `${name}: ${isChecked ? value : "Not Allowed"}`
        )
        .join("\n");
};

const getPolicyString = (listing: Doc<"listings">, varName: string): string => {
    // Look up one policy by name and describe it as either
    //   "<name>: Allowed, Details: <value>"   (checked, with detail text)
    //   "<name>: Allowed"                     (checked, no detail text)
    //   "<name>: Not Allowed"                 (unchecked)
    // Returns "" when the listing has no policies or the name is unknown.
    const { policies } = listing;
    if (!policies) return "";
    const match = policies.find((p: any) => p.name === varName);
    if (!match) return "";
    let description = `${match.name}: ${match.isChecked ? "Allowed" : "Not Allowed"}`;
    if (match.isChecked && match.value) {
        description += `, Details: ${match.value}`;
    }
    return description;
};

const parseResponseToFunctionArgs = (
    response: OpenAI.Chat.Completions.ChatCompletion
) => {
    // Extract the arguments of the FIRST tool call in a chat completion and
    // parse them from JSON. Returns {} when the model made no tool calls.
    const toolCalls = response.choices[0].message.tool_calls;
    // Guard both "tool_calls missing" and "tool_calls empty" — the original
    // crashed on toolCalls[0] when the array was present but empty.
    if (!toolCalls || toolCalls.length === 0) return {};

    const firstArgs = toolCalls[0].function.arguments;

    // NOTE(review): arguments come straight from the model; JSON.parse throws
    // on malformed output. Callers appear to rely on that propagating —
    // intentionally left unchanged.
    return JSON.parse(firstArgs);
};