From bb834606fc502d8ed969ae8fbd392cfb059ae8b1 Mon Sep 17 00:00:00 2001
From: Andrew Gross
Date: Mon, 10 Mar 2025 13:46:17 +0000
Subject: [PATCH] Add QWQ-32b prompt format

---
 backend/prompts.py | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/backend/prompts.py b/backend/prompts.py
index f2e7b6ad..c527fe89 100644
--- a/backend/prompts.py
+++ b/backend/prompts.py
@@ -290,6 +290,25 @@ def format(self, prompt, response, system_prompt, settings):
     def context_bos(self):
         return True
 
+class PromptFormat_qwq(PromptFormat_chatml):
+
+    description = "QWQ format with tokens"
+
+    def format(self, prompt, response, system_prompt, settings):
+        text = ""
+        if system_prompt and system_prompt.strip() != "":
+            text += "<|im_start|>system\n"
+            text += system_prompt
+            text += "\n<|im_end|>\n"
+        text += "<|im_start|>user\n"
+        text += prompt
+        text += "<|im_end|>\n"
+        text += "<|im_start|>assistant\n\n"
+        if response:
+            text += response
+            text += "<|im_end|>\n"
+        return text
+
 
 class PromptFormat_tinyllama(PromptFormat_chatml):
 
@@ -536,6 +555,7 @@ def context_bos(self):
 prompt_formats = \
 {
     "Chat-RP": PromptFormat_raw,
+    "QwQ-32b": PromptFormat_qwq,
     "Llama-chat": PromptFormat_llama,
     "Llama3-instruct": PromptFormat_llama3,
     "ChatML": PromptFormat_chatml,