Skip to content

Commit a018cd5

Browse files
authored
Fix tool definition type hint inconsistency (#14)
1 parent a64d3a3 commit a018cd5

File tree

2 files changed

+7
-4
lines changed

2 files changed

+7
-4
lines changed

src/lmstudio/json_api.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -162,6 +162,7 @@
162162
"PredictionRoundResult",
163163
"PromptProcessingCallback",
164164
"SerializedLMSExtendedError",
165+
"ToolDefinition",
165166
"ToolFunctionDef",
166167
"ToolFunctionDefDict",
167168
]
@@ -1361,6 +1362,9 @@ def _additional_config_options(cls) -> DictObject:
13611362
return {"for_text_completion": True}
13621363

13631364

1365+
ToolDefinition: TypeAlias = ToolFunctionDef | ToolFunctionDefDict | Callable[..., Any]
1366+
1367+
13641368
class ChatResponseEndpoint(PredictionEndpoint[TPrediction]):
13651369
"""API channel endpoint for requesting a chat response from a model."""
13661370

@@ -1371,7 +1375,7 @@ class ChatResponseEndpoint(PredictionEndpoint[TPrediction]):
13711375
# TODO: Consider implementing this conversion in _kv_config.py
13721376
@staticmethod
13731377
def parse_tools(
1374-
tools: Iterable[ToolFunctionDef | ToolFunctionDefDict | Callable[..., Any]],
1378+
tools: Iterable[ToolDefinition],
13751379
) -> tuple[LlmToolUseSettingToolArray, ClientToolMap]:
13761380
"""Split tool function definitions into server and client details."""
13771381
if not tools:

src/lmstudio/sync_api.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -103,8 +103,7 @@
103103
RemoteCallHandler,
104104
TModelInfo,
105105
TPrediction,
106-
ToolFunctionDef,
107-
ToolFunctionDefDict,
106+
ToolDefinition,
108107
check_model_namespace,
109108
load_struct,
110109
_model_spec_to_api_dict,
@@ -1547,7 +1546,7 @@ def respond(
15471546
def act(
15481547
self,
15491548
chat: Chat | ChatHistoryDataDict | str,
1550-
tools: Iterable[ToolFunctionDef | ToolFunctionDefDict],
1549+
tools: Iterable[ToolDefinition],
15511550
*,
15521551
max_prediction_rounds: int | None = None,
15531552
config: LlmPredictionConfig | LlmPredictionConfigDict | None = None,

0 commit comments

Comments (0)