@@ -41,7 +41,7 @@ class MyNameResponse(BaseModel):
     system_prompt = getattr(return_type, "system_prompt", "").strip()
     if not llm_supports_response_format or config.llm.startswith("llama-cpp-python"):
         system_prompt += f"\n\nFormat your response according to this JSON schema:\n{return_type.model_json_schema()!s}"
-    # Constrain the reponse format to the JSON schema if it's supported by the LLM [1]. Strict mode
+    # Constrain the response format to the JSON schema if it's supported by the LLM [1]. Strict mode
     # is disabled by default because it only supports a subset of JSON schema features [2].
     # [1] https://docs.litellm.ai/docs/completion/json_mode
     # [2] https://platform.openai.com/docs/guides/structured-outputs#some-type-specific-keywords-are-not-yet-supported
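For readers unfamiliar with the litellm JSON-mode feature referenced in [1], the sketch below shows roughly what a constrained completion call could look like. It is a minimal illustration under assumptions: litellm is the client, and the model name, prompts, and the MyNameResponse placeholder are invented for the example rather than taken from this repository.

# Minimal sketch, assuming litellm as the client; the model name, prompts, and
# MyNameResponse are illustrative placeholders, not code from this repository.
import litellm
from pydantic import BaseModel


class MyNameResponse(BaseModel):
    my_name: str


response = litellm.completion(
    model="gpt-4o-mini",  # assumes a model that supports structured outputs
    messages=[
        {"role": "system", "content": "Extract the user's name."},
        {"role": "user", "content": "Hi, I'm Ada Lovelace."},
    ],
    # Constrain the response to the Pydantic model's JSON schema [1]; strict
    # mode is left off because it supports only a subset of JSON schema features [2].
    response_format={
        "type": "json_schema",
        "json_schema": {
            "name": MyNameResponse.__name__,
            "schema": MyNameResponse.model_json_schema(),
            "strict": False,
        },
    },
)
my_name = MyNameResponse.model_validate_json(response.choices[0].message.content)

When the model does not support response_format, the if-branch in the diff above falls back to appending the JSON schema to the system prompt instead.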