
Commit 685695d

fix: Tools detection for OpenAI o1 + LLM Tools/Functions merging (#2161)
* Fix tools detection for OpenAI o1
* Fix merging of LLM config producing both tools and functions
1 parent fa2be6f commit 685695d

2 files changed: +37, -4 lines changed


autogen/oai/client.py

Lines changed: 7 additions & 4 deletions
@@ -31,7 +31,7 @@
 from ..logger.logger_utils import get_current_ts
 from ..runtime_logging import log_chat_completion, log_new_client, log_new_wrapper, logging_enabled
 from ..token_count_utils import count_token
-from .client_utils import FormatterProtocol, logging_formatter
+from .client_utils import FormatterProtocol, logging_formatter, merge_config_with_tools
 from .openai_utils import OAI_PRICE1K, get_key, is_valid_api_key
 
 TOOL_ENABLED = False
@@ -639,8 +639,11 @@ def _create_or_parse(*args, **kwargs):
                 warnings.warn(
                     f"The {params.get('model')} model does not support streaming. The stream will be set to False."
                 )
-            if params.get("tools", False):
-                raise ModelToolNotSupportedError(params.get("model"))
+            if "tools" in params:
+                if params["tools"]:  # If tools exist, raise as unsupported
+                    raise ModelToolNotSupportedError(params.get("model"))
+                else:
+                    params.pop("tools")  # Remove empty tools list
             self._process_reasoning_model_params(params)
             params["stream"] = False
             response = create_or_parse(**params)
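To make the behavioral change concrete, here is a minimal standalone sketch of the new guard (illustrative only: `ToolsNotSupported` and `strip_unsupported_tools` are stand-in names, not part of autogen; the real logic lives inside `OpenAIClient` and raises `ModelToolNotSupportedError`):

```python
from typing import Any


class ToolsNotSupported(Exception):
    """Stand-in for autogen's ModelToolNotSupportedError."""


def strip_unsupported_tools(params: dict[str, Any]) -> dict[str, Any]:
    """Drop an empty 'tools' list; raise only if tools were actually requested."""
    if "tools" in params:
        if params["tools"]:  # real tools requested -> the model cannot honor them
            raise ToolsNotSupported(params.get("model"))
        params.pop("tools")  # empty list: remove it rather than sending it to the API
    return params


params = {"model": "o1-mini", "messages": [], "tools": []}
assert "tools" not in strip_unsupported_tools(params)  # empty list removed, no exception
```

The previous check, `params.get("tools", False)`, already avoided raising for an empty list, but it left the empty `tools` key in `params`; the new branch removes it before the request is built.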
@@ -1083,7 +1086,7 @@ def create(self, **config: Any) -> ModelClient.ModelClientResponseProtocol:
         for i in ordered_clients_indices:
             # merge the input config with the i-th config in the config list
             client_config = self._config_list[i]
-            full_config = {**config, **client_config, "tools": config.get("tools", []) + client_config.get("tools", [])}
+            full_config = merge_config_with_tools(config, client_config)
 
             # separate the config into create_config and extra_kwargs
             create_config, extra_kwargs = self._separate_create_config(full_config)
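For comparison, the dict-literal merge removed in this hunk can be reproduced in isolation (the `old_merge` wrapper is hypothetical, shown only to illustrate the two problems named in the commit message):

```python
from typing import Any


def old_merge(config: dict[str, Any], client_config: dict[str, Any]) -> dict[str, Any]:
    # The exact expression that was replaced above: it always writes a "tools" key,
    # even when both sides are empty, and never checks for the legacy "functions" key.
    return {**config, **client_config, "tools": config.get("tools", []) + client_config.get("tools", [])}


merged = old_merge({"temperature": 1}, {"model": "gpt-4", "functions": [{"name": "get_time"}]})
print(merged["tools"])        # [] -> an empty tools list injected into every request
print("functions" in merged)  # True -> the request now carries both "tools" and "functions"
```

The new `merge_config_with_tools` helper, added below in client_utils.py, is designed to avoid both outcomes.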

autogen/oai/client_utils.py

Lines changed: 30 additions & 0 deletions
@@ -110,6 +110,36 @@ def validate_parameter(
     return param_value
 
 
+def merge_config_with_tools(config: dict[str, Any], client_config: dict[str, Any]) -> dict[str, Any]:
+    """Merge configuration dictionaries with proper tools and functions handling.
+
+    This function merges two configuration dictionaries while ensuring that:
+    1. Empty 'tools' arrays are not added unnecessarily
+    2. 'tools' and deprecated 'functions' parameters are not both present
+    3. Actual tool configurations are properly merged
+
+    Args:
+        config: The base configuration dictionary (e.g., from create() call)
+        client_config: The client-specific configuration dictionary (e.g., from config_list)
+
+    Returns:
+        dict[str, Any]: The merged configuration with proper tools/functions handling
+    """
+    # Start with a clean merge of both configs
+    full_config = {**config, **client_config}
+
+    # Add tools if tools contains something AND are not using deprecated functions
+    config_tools = config.get("tools", [])
+    client_tools = client_config.get("tools", [])
+
+    if config_tools or client_tools:
+        # Don't add tools if functions parameter is present (deprecated API)
+        if "functions" not in full_config:
+            full_config["tools"] = config_tools + client_tools
+
+    return full_config
+
+
 def should_hide_tools(messages: list[dict[str, Any]], tools: list[dict[str, Any]], hide_tools_param: str) -> bool:
     """Determines if tools should be hidden. This function is used to hide tools when they have been run, minimising the chance of the LLM choosing them when they shouldn't.
     Parameters:
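A short usage sketch of the new helper (assuming the package layout shown in this diff, i.e. that it is importable from `autogen.oai.client_utils`; the tool definitions are illustrative):

```python
from typing import Any

from autogen.oai.client_utils import merge_config_with_tools

base: dict[str, Any] = {"temperature": 1, "tools": [{"type": "function", "function": {"name": "ping"}}]}
per_client: dict[str, Any] = {"model": "gpt-4o", "tools": [{"type": "function", "function": {"name": "pong"}}]}

# Non-empty tool lists from both sides are concatenated, as before.
merged = merge_config_with_tools(base, per_client)
print([t["function"]["name"] for t in merged["tools"]])  # ['ping', 'pong']

# When neither side declares tools, no empty "tools" key is injected,
# which matters for reasoning models such as o1 (see the client.py change above).
plain = merge_config_with_tools({"temperature": 1}, {"model": "o1-mini"})
print("tools" in plain)  # False

# Configs using the deprecated "functions" parameter no longer gain a "tools" key as well.
legacy = merge_config_with_tools({}, {"model": "gpt-4", "functions": [{"name": "get_time"}]})
print("tools" in legacy, "functions" in legacy)  # False True
```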
