llm.bind_tools where the tool is a BaseTool class. Error: Object of type 'ModelMetaclass' is not JSON serializable #25698
-
Checked other resources
Commit to Help
Example Code (Python) ...
import datetime
from typing import Optional, Type

from langchain.pydantic_v1 import BaseModel, Field
from langchain_community.chat_models import ChatOllama
from langchain_core.callbacks import (
    AsyncCallbackManagerForToolRun,
    CallbackManagerForToolRun,
)
from langchain_core.tools import BaseTool
from langchain_openai import ChatOpenAI
# ChatOpenAI client pointed at a local Ollama server through its
# OpenAI-compatible /v1 endpoint; Ollama ignores the api_key, but the
# OpenAI client requires a non-empty value.
model = ChatOpenAI(
    api_key="ollama",
    model="qwen2:7b",
    base_url="http://127.0.0.1:11434/v1",
)
class CalculatorInput(BaseModel):
    # Argument schema for CustomCalculatorTool; the Field descriptions are
    # surfaced to the LLM as part of the generated tool/function spec.
    a: int = Field(description="first number")
    b: int = Field(description="second number")
class CustomCalculatorTool(BaseTool):
    """Tool that multiplies two integers and returns the product as a string.

    ``name`` and ``description`` carry explicit ``str`` annotations: on
    langchain versions built on pydantic v2, un-annotated class attributes
    are rejected at class-creation time.
    """

    name: str = "Calculator"
    description: str = "useful for when you need to answer questions about math"
    args_schema: Type[BaseModel] = CalculatorInput
    return_direct: bool = False

    def _run(
        self, a: int, b: int, run_manager: Optional[CallbackManagerForToolRun] = None
    ) -> str:
        """Use the tool."""
        return f"{a * b}"

    async def _arun(
        self,
        a: int,
        b: int,
        run_manager: Optional[AsyncCallbackManagerForToolRun] = None,
    ) -> str:
        """Use the tool asynchronously.

        The calculation is cheap, so we delegate to the sync implementation.
        If the sync calculation were expensive, delete this method entirely
        and LangChain would run ``_run`` in a worker thread automatically.
        """
        # run_manager defaults to None; the original code called
        # run_manager.get_sync() unconditionally, which raises AttributeError
        # when no callback manager is supplied.
        sync_manager = run_manager.get_sync() if run_manager is not None else None
        return self._run(a, b, run_manager=sync_manager)
class TodayInput(BaseModel):
    # Optional free-form query; the tool ignores its content.
    # Annotated Optional[str] because the default is None — the original
    # declared `query: str` with default=None, a type mismatch.
    query: Optional[str] = Field(description="query", default=None)
class TodayTool(BaseTool):
    """Tool that returns the current local date and time as a string.

    ``name`` and ``description`` carry explicit ``str`` annotations for
    pydantic-v2-based langchain versions. Requires ``import datetime`` at
    module level — the original snippet never imported it.
    """

    name: str = "today"
    description: str = "如果你想知道今天的日期,这是一个很不错的工具"
    args_schema: Type[BaseModel] = TodayInput
    return_direct: bool = False

    def _run(
        self, query: str, run_manager: Optional[CallbackManagerForToolRun] = None
    ) -> str:
        """Use the tool."""
        return f"{datetime.datetime.now()}"

    async def _arun(
        self, query: str, run_manager: Optional[AsyncCallbackManagerForToolRun] = None
    ) -> str:
        """Use the tool asynchronously.

        The lookup is cheap, so it simply mirrors the sync implementation.
        """
        return f"{datetime.datetime.now()}"
import langchain

langchain.debug = True

# Pass tool INSTANCES, not the classes themselves. Passing the classes is
# what triggers "Object of type 'ModelMetaclass' is not JSON serializable":
# convert_to_openai_tool cannot serialize a pydantic metaclass.
tools = [CustomCalculatorTool(), TodayTool()]

# bind_tools returns a NEW runnable; the original discarded the result, so
# the model was invoked without any tools bound.
model_with_tools = model.bind_tools(tools=tools)
print(model_with_tools.invoke("今天日期是什么"))
... Description: Traceback (most recent call last): ... Process finished with exit code 1. System Info: langchain==0.2.11 |
Beta Was this translation helpful? Give feedback.
Replies: 2 comments 4 replies
-
@zh19990906 Hello there! I'm here to help you with your coding conundrum. Let's squash that bug together! 🤖 I found a similar solved discussion that might help: "Cannot create custom tools due to type errors, including documentation examples." Updating from Python 3.10 to 3.11 resolved that issue, although the exact cause is still unclear [1]. To resolve the "Object of type 'ModelMetaclass' is not JSON serializable" error when using `bind_tools`, ensure that the tools you pass are serializable instances. Here's a snippet from the `bind_tools` implementation:
def bind_tools(
self,
tools: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable, BaseTool]],
*,
tool_choice: Optional[Union[Dict[str, Dict], bool, str]] = None,
**kwargs: Any,
) -> Runnable[LanguageModelInput, BaseMessage]:
"""Bind tool-like objects to this chat model"""
formatted_tools = [convert_to_openai_tool(tool) for tool in tools]
tool_names = [ft["function"]["name"] for ft in formatted_tools]
if tool_choice:
if isinstance(tool_choice, dict):
if not any(
tool_choice["function"]["name"] == name for name in tool_names
):
raise ValueError(
f"Tool choice {tool_choice=} was specified, but the only "
f"provided tools were {tool_names}."
)
elif isinstance(tool_choice, str):
chosen = [
f for f in formatted_tools if f["function"]["name"] == tool_choice
]
if not chosen:
raise ValueError(
f"Tool choice {tool_choice=} was specified, but the only "
f"provided tools were {tool_names}."
)
elif isinstance(tool_choice, bool):
if len(formatted_tools) > 1:
raise ValueError(
"tool_choice=True can only be specified when a single tool is "
f"passed in. Received {len(tools)} tools."
)
tool_choice = formatted_tools[0]
else:
raise ValueError(
"""Unrecognized tool_choice type. Expected dict having format like
this {"type": "function", "function": {"name": <<tool_name>>}}"""
f"Received: {tool_choice}"
)
kwargs["tool_choice"] = tool_choice
formatted_tools = [convert_to_openai_tool(tool) for tool in tools]
    return super().bind(tools=formatted_tools, **kwargs)
Ensure that the tools you are passing to `bind_tools` are serializable. Additionally, LangChain provides a `Serializable` base class you can inherit from:
from langchain_core.pydantic_v1 import BaseModel
from langchain_core.load.serializable import Serializable
class MyCustomTool(BaseModel, Serializable):
# Define your custom tool attributes and methods here
@classmethod
def is_lc_serializable(cls) -> bool:
return True
@classmethod
def get_lc_namespace(cls) -> List[str]:
return cls.__module__.split(".")
@property
def lc_secrets(self) -> Dict[str, str]:
return {}
@property
def lc_attributes(self) -> Dict:
return {}
def to_json(self) -> Union[SerializedConstructor, SerializedNotImplemented]:
return super().to_json() By inheriting from Furthermore, ensure that your from langchain_core.pydantic_v1 import BaseModel
from langchain_core.tools.base import BaseTool
class MyTool(BaseTool):
name: str = "my_tool"
description: str = "This is my custom tool."
args_schema: Optional[Type[BaseModel]] = None
def _run(self, *args: Any, **kwargs: Any) -> Any:
# Implement the tool's functionality here
pass
async def _arun(self, *args: Any, **kwargs: Any) -> Any:
# Implement the tool's async functionality here
    pass
Ensure that the tools you pass to `bind_tools` are initialized instances, not classes. |
Beta Was this translation helpful? Give feedback.
-
You should be passing in the tools as initialized instances, e.g.: |
Beta Was this translation helpful? Give feedback.
you should be passing in the tools initialized, eg:
tools = [CustomCalculatorTool(), TodayTool()]