diff --git a/src/lib/server/endpoints/endpoints.ts b/src/lib/server/endpoints/endpoints.ts index 0cc6039fe2b..934fc0f8886 100644 --- a/src/lib/server/endpoints/endpoints.ts +++ b/src/lib/server/endpoints/endpoints.ts @@ -48,6 +48,8 @@ export interface EndpointParameters { toolResults?: ToolResult[]; isMultimodal?: boolean; conversationId?: ObjectId; + userId?: ObjectId; + userEmail?: string; } interface CommonEndpoint { diff --git a/src/lib/server/endpoints/openai/endpointOai.ts b/src/lib/server/endpoints/openai/endpointOai.ts index 2fa7ec2e88f..eb436dc2675 100644 --- a/src/lib/server/endpoints/openai/endpointOai.ts +++ b/src/lib/server/endpoints/openai/endpointOai.ts @@ -158,7 +158,15 @@ export async function endpointOai( "Tools are not supported for 'completions' mode, switch to 'chat_completions' instead" ); } - return async ({ messages, preprompt, continueMessage, generateSettings, conversationId }) => { + return async ({ + messages, + preprompt, + continueMessage, + generateSettings, + conversationId, + userId, + userEmail, + }) => { const prompt = await buildPrompt({ messages, continueMessage, @@ -183,6 +191,8 @@ export async function endpointOai( body: { ...body, ...extraBody }, headers: { "ChatUI-Conversation-ID": conversationId?.toString() ?? "", + "ChatUI-User-Id": userId?.toString() ?? "", + "ChatUI-User-Email": userEmail, "X-use-cache": "false", }, }); @@ -197,6 +207,8 @@ export async function endpointOai( tools, toolResults, conversationId, + userId, + userEmail, }) => { // Format messages for the chat API, handling multimodal content if supported let messagesOpenAI: OpenAI.Chat.Completions.ChatCompletionMessageParam[] = @@ -298,6 +310,8 @@ export async function endpointOai( body: { ...body, ...extraBody }, headers: { "ChatUI-Conversation-ID": conversationId?.toString() ?? "", + "ChatUI-User-Id": userId?.toString() ?? "", + "ChatUI-User-Email": userEmail, "X-use-cache": "false", }, } @@ -310,6 +324,8 @@ export async function endpointOai( body: { ...body, ...extraBody }, headers: { "ChatUI-Conversation-ID": conversationId?.toString() ?? "", + "ChatUI-User-Id": userId?.toString() ?? "", + "ChatUI-User-Email": userEmail, "X-use-cache": "false", }, } diff --git a/src/lib/server/textGeneration/generate.ts b/src/lib/server/textGeneration/generate.ts index 3445efdc906..8f497c57360 100644 --- a/src/lib/server/textGeneration/generate.ts +++ b/src/lib/server/textGeneration/generate.ts @@ -52,6 +52,8 @@ export async function* generate( toolResults, isMultimodal: model.multimodal, conversationId: conv._id, + userId: conv.userId, + userEmail: conv.userEmail, })) { // text generation completed if (output.generated_text) { diff --git a/src/lib/types/Conversation.ts b/src/lib/types/Conversation.ts index a1f97978d90..dc29636f5f0 100644 --- a/src/lib/types/Conversation.ts +++ b/src/lib/types/Conversation.ts @@ -9,6 +9,7 @@ export interface Conversation extends Timestamps { sessionId?: string; userId?: User["_id"]; + userEmail?: User["email"]; model: string; embeddingModel: string; diff --git a/src/routes/conversation/+server.ts b/src/routes/conversation/+server.ts index fd0ebc6b83e..84540eba42e 100644 --- a/src/routes/conversation/+server.ts +++ b/src/routes/conversation/+server.ts @@ -109,7 +109,9 @@ export const POST: RequestHandler = async ({ locals, request }) => { updatedAt: new Date(), userAgent: request.headers.get("User-Agent") ?? undefined, embeddingModel, - ...(locals.user ? { userId: locals.user._id, userEmail: locals.user.email } : { sessionId: locals.sessionId }), + ...(locals.user + ? { userId: locals.user._id, userEmail: locals.user.email } + : { sessionId: locals.sessionId }), ...(values.fromShare ? { meta: { fromShareId: values.fromShare } } : {}), });