Suspected logic bug in 'get_optional_params' #10245

Draft: wants to merge 6 commits into base: main

litellm/utils.py: 103 changes (35 additions, 68 deletions)
@@ -2688,84 +2688,51 @@ def get_optional_params( # noqa: PLR0915
"thinking": None,
}

# filter out those parameters that were passed with non-default values
# Parameters that can be supplied by the user that we don't want to include in non-default-params.
excluded_non_default_params = {
"additional_drop_params",
"allowed_openai_params",
"api_version",
"custom_llm_provider",
"drop_params",
"messages",
"model",
}

# From the parameters passed into this function, filter for parameters with non-default values.
non_default_params = {
k: v
for k, v in passed_params.items()
if (
k != "model"
and k != "custom_llm_provider"
and k != "api_version"
and k != "drop_params"
and k != "allowed_openai_params"
and k != "additional_drop_params"
and k != "messages"
and k in default_params
k in default_params
and k not in excluded_non_default_params
and v != default_params[k]
and _should_drop_param(k=k, additional_drop_params=additional_drop_params)
is False
and not _should_drop_param(k=k, additional_drop_params=additional_drop_params)
)
}

## raise exception if function calling passed in for a provider that doesn't support it
if (
"functions" in non_default_params
or "function_call" in non_default_params
or "tools" in non_default_params
):
if (
custom_llm_provider == "ollama"
and custom_llm_provider != "text-completion-openai"
and custom_llm_provider != "azure"
and custom_llm_provider != "vertex_ai"
and custom_llm_provider != "anyscale"
and custom_llm_provider != "together_ai"
and custom_llm_provider != "groq"
and custom_llm_provider != "nvidia_nim"
and custom_llm_provider != "cerebras"
and custom_llm_provider != "xai"
and custom_llm_provider != "ai21_chat"
and custom_llm_provider != "volcengine"
and custom_llm_provider != "deepseek"
and custom_llm_provider != "codestral"
and custom_llm_provider != "mistral"
and custom_llm_provider != "anthropic"
and custom_llm_provider != "cohere_chat"
and custom_llm_provider != "cohere"
and custom_llm_provider != "bedrock"
and custom_llm_provider != "ollama_chat"
and custom_llm_provider != "openrouter"
and custom_llm_provider not in litellm.openai_compatible_providers
):
if custom_llm_provider == "ollama":
# ollama actually supports json output
optional_params["format"] = "json"
litellm.add_function_to_prompt = (
True # so that main.py adds the function call to the prompt
)
if "tools" in non_default_params:
optional_params[
"functions_unsupported_model"
] = non_default_params.pop("tools")
non_default_params.pop(
"tool_choice", None
) # causes ollama requests to hang
elif "functions" in non_default_params:
optional_params[
"functions_unsupported_model"
] = non_default_params.pop("functions")
elif (
litellm.add_function_to_prompt
): # if user opts to add it to prompt instead
optional_params["functions_unsupported_model"] = non_default_params.pop(
"tools", non_default_params.pop("functions", None)
)
else:
raise UnsupportedParamsError(
status_code=500,
message=f"Function calling is not supported by {custom_llm_provider}.",
)
if any(param_name in non_default_params for param_name in ("functions", "function_call", "tools")):
functions_unsupported_model_key = "functions_unsupported_model"

# Handle Ollama as a special case (ollama actually supports JSON output)
if custom_llm_provider == "ollama":
optional_params["format"] = "json"
litellm.add_function_to_prompt = True # so that main.py adds the function call to the prompt
non_default_params.pop("tool_choice", None) # causes ollama requests to hang

# Handle all other providers that are not OpenAI-compatible
if litellm.add_function_to_prompt and (custom_llm_provider not in litellm.openai_compatible_providers):
# Attempt to add the supplied function call to the prompt, preferring tools > functions > function_call
function_call_value = non_default_params.pop("tools",
non_default_params.pop("functions",
non_default_params.pop("function_call", None)))
optional_params[functions_unsupported_model_key] = function_call_value
else:
raise UnsupportedParamsError(
status_code=500,
message=f"Function calling is not supported by {custom_llm_provider}.",
)

provider_config: Optional[BaseConfig] = None
if custom_llm_provider is not None and custom_llm_provider in [
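For context on the issue title, the sketch below illustrates the suspected logic bug in the removed provider check, assuming the removed lines reflect what is currently on main. `openai_compatible_providers` here is a hypothetical subset and the chain of `!=` comparisons is abbreviated; the shape of the condition is what matters.

```python
# Hypothetical stand-in for litellm.openai_compatible_providers (illustrative subset only).
openai_compatible_providers = ["groq", "deepseek", "xai"]

def removed_condition(custom_llm_provider: str) -> bool:
    # Abbreviated copy of the removed check: == "ollama" followed by a chain of != tests.
    return (
        custom_llm_provider == "ollama"
        and custom_llm_provider != "azure"
        and custom_llm_provider != "anthropic"
        and custom_llm_provider != "bedrock"
        and custom_llm_provider not in openai_compatible_providers
    )

for provider in ("ollama", "azure", "anthropic", "bedrock", "some_other_provider"):
    print(provider, removed_condition(provider))
# Only "ollama" prints True: once the first clause holds, every "!=" clause is
# automatically satisfied, so the whole chain reduces to the Ollama check.
```

If that matches the behaviour on main, the `elif litellm.add_function_to_prompt` / `else: raise UnsupportedParamsError` fallbacks in the removed block look unreachable for every provider other than Ollama, which appears to be the bug this PR addresses.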