
Commit cb29148

Support websearch for openAI chat completion mode (#608)
1 parent a1afcb6 commit cb29148

File tree

1 file changed (+34 -3 lines)

src/lib/server/endpoints/openai/endpointOai.ts

Lines changed: 34 additions & 3 deletions
```diff
@@ -4,6 +4,7 @@ import { openAIChatToTextGenerationStream } from "./openAIChatToTextGenerationSt
 import { buildPrompt } from "$lib/buildPrompt";
 import { OPENAI_API_KEY } from "$env/static/private";
 import type { Endpoint } from "../endpoints";
+import { format } from "date-fns";
 
 export const endpointOAIParametersSchema = z.object({
 	weight: z.number().int().positive().default(1),
@@ -54,7 +55,37 @@ export async function endpointOai(
 		};
 	} else if (completion === "chat_completions") {
 		return async ({ conversation }) => {
-			const messages = conversation.messages.map((message) => ({
+			let messages = conversation.messages;
+			const webSearch = conversation.messages[conversation.messages.length - 1].webSearch;
+
+			if (webSearch && webSearch.context) {
+				const lastMsg = messages.slice(-1)[0];
+				const messagesWithoutLastUsrMsg = messages.slice(0, -1);
+				const previousUserMessages = messages.filter((el) => el.from === "user").slice(0, -1);
+
+				const previousQuestions =
+					previousUserMessages.length > 0
+						? `Previous questions: \n${previousUserMessages
+								.map(({ content }) => `- ${content}`)
+								.join("\n")}`
+						: "";
+				const currentDate = format(new Date(), "MMMM d, yyyy");
+				messages = [
+					...messagesWithoutLastUsrMsg,
+					{
+						from: "user",
+						content: `I searched the web using the query: ${webSearch.searchQuery}. Today is ${currentDate} and here are the results:
+						=====================
+						${webSearch.context}
+						=====================
+						${previousQuestions}
+						Answer the question: ${lastMsg.content}
+						`,
+					},
+				];
+			}
+
+			const messagesOpenAI = messages.map((message) => ({
 				role: message.from,
 				content: message.content,
 			}));
@@ -63,8 +94,8 @@ export async function endpointOai(
 			await openai.chat.completions.create({
 				model: model.id ?? model.name,
 				messages: conversation.preprompt
-					? [{ role: "system", content: conversation.preprompt }, ...messages]
-					: messages,
+					? [{ role: "system", content: conversation.preprompt }, ...messagesOpenAI]
+					: messagesOpenAI,
 				stream: true,
 				max_tokens: model.parameters?.max_new_tokens,
 				stop: model.parameters?.stop,
```
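
In short, the change only touches the `chat_completions` branch: when the conversation's last message carries a `webSearch` result with context, that message is replaced by a synthetic user message bundling the search query, the current date (formatted with `date-fns`), the search results, any previous user questions, and the original question. The sketch below isolates that rewriting step as a standalone helper for readability; it is a minimal sketch, not the project's code. The `Message` and `WebSearch` interfaces and the `injectWebSearchContext` name are simplified assumptions, only `from`, `content`, `webSearch.searchQuery`, and `webSearch.context` come from the diff.

```ts
// Minimal sketch of the message-rewriting step added by this commit,
// using simplified, assumed types rather than the project's real ones.
import { format } from "date-fns";

interface WebSearch {
	searchQuery: string;
	context: string;
}

interface Message {
	from: "user" | "assistant";
	content: string;
	webSearch?: WebSearch;
}

function injectWebSearchContext(messages: Message[]): Message[] {
	const lastMsg = messages[messages.length - 1];
	const webSearch = lastMsg?.webSearch;

	// No web search attached to the latest message: leave the history untouched.
	if (!webSearch || !webSearch.context) return messages;

	const messagesWithoutLastUsrMsg = messages.slice(0, -1);
	const previousUserMessages = messages.filter((el) => el.from === "user").slice(0, -1);

	// Earlier user questions are listed so the model keeps the conversational thread.
	const previousQuestions =
		previousUserMessages.length > 0
			? `Previous questions: \n${previousUserMessages.map(({ content }) => `- ${content}`).join("\n")}`
			: "";
	const currentDate = format(new Date(), "MMMM d, yyyy");

	// Replace the last user message with one that folds in the search results.
	return [
		...messagesWithoutLastUsrMsg,
		{
			from: "user",
			content: `I searched the web using the query: ${webSearch.searchQuery}. Today is ${currentDate} and here are the results:
=====================
${webSearch.context}
=====================
${previousQuestions}
Answer the question: ${lastMsg.content}`,
		},
	];
}
```

The rewritten list is then mapped to OpenAI's `{ role, content }` shape (`messagesOpenAI` in the diff) and passed to `openai.chat.completions.create`, with the optional `conversation.preprompt` prepended as a `system` message.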

0 commit comments
