From c3916817a025fd4c69855bed6be8bc5193221df3 Mon Sep 17 00:00:00 2001
From: "Clelia (Astra) Bertelli"
Date: Thu, 16 Oct 2025 16:30:36 +0200
Subject: [PATCH] feat: integrate anthropic with tool call block

---
 .../llama_index/llms/anthropic/base.py        | 139 ++++++++++++------
 .../llama_index/llms/anthropic/utils.py       |  25 +++-
 .../llama-index-llms-anthropic/pyproject.toml |   4 +-
 .../tests/test_llms_anthropic.py              |  44 +++++-
 .../llms/llama-index-llms-anthropic/uv.lock   |  12 +-
 5 files changed, 166 insertions(+), 58 deletions(-)

diff --git a/llama-index-integrations/llms/llama-index-llms-anthropic/llama_index/llms/anthropic/base.py b/llama-index-integrations/llms/llama-index-llms-anthropic/llama_index/llms/anthropic/base.py
index 39902819ce..2e51243e78 100644
--- a/llama-index-integrations/llms/llama-index-llms-anthropic/llama_index/llms/anthropic/base.py
+++ b/llama-index-integrations/llms/llama-index-llms-anthropic/llama_index/llms/anthropic/base.py
@@ -13,6 +13,7 @@
     Set,
     Tuple,
     Union,
+    cast,
 )

 from llama_index.core.base.llms.types import (
@@ -23,6 +24,7 @@
     LLMMetadata,
     MessageRole,
     ContentBlock,
+    ToolCallBlock,
 )
 from llama_index.core.base.llms.types import TextBlock as LITextBlock
 from llama_index.core.base.llms.types import CitationBlock as LICitationBlock
@@ -351,8 +353,7 @@ def _completion_response_from_chat_response(

     def _get_blocks_and_tool_calls_and_thinking(
         self, response: Any
-    ) -> Tuple[List[ContentBlock], List[Dict[str, Any]], List[Dict[str, Any]]]:
-        tool_calls = []
+    ) -> Tuple[List[ContentBlock], List[Dict[str, Any]]]:
         blocks: List[ContentBlock] = []
         citations: List[TextCitation] = []
         tracked_citations: Set[str] = set()
@@ -392,9 +393,15 @@
                 )
             )
         elif isinstance(content_block, ToolUseBlock):
-            tool_calls.append(content_block.model_dump())
+            blocks.append(
+                ToolCallBlock(
+                    tool_call_id=content_block.id,
+                    tool_kwargs=cast(Union[Dict[str, Any], str], content_block.input),
+                    tool_name=content_block.name,
+                )
+            )

-        return blocks, tool_calls, [x.model_dump() for x in citations]
+        return blocks, [x.model_dump() for x in citations]

     @llm_chat_callback()
     def chat(
@@ -412,17 +419,12 @@
             **all_kwargs,
         )

-        blocks, tool_calls, citations = self._get_blocks_and_tool_calls_and_thinking(
-            response
-        )
+        blocks, citations = self._get_blocks_and_tool_calls_and_thinking(response)

         return AnthropicChatResponse(
             message=ChatMessage(
                 role=MessageRole.ASSISTANT,
                 blocks=blocks,
-                additional_kwargs={
-                    "tool_calls": tool_calls,
-                },
             ),
             citations=citations,
             raw=dict(response),
@@ -532,13 +534,26 @@ def gen() -> Generator[AnthropicChatResponse, None, None]:
                     else:
                         tool_calls_to_send = cur_tool_calls

+                    for tool_call in tool_calls_to_send:
+                        if tool_call.id not in [
+                            block.tool_call_id
+                            for block in content
+                            if isinstance(block, ToolCallBlock)
+                        ]:
+                            content.append(
+                                ToolCallBlock(
+                                    tool_call_id=tool_call.id,
+                                    tool_name=tool_call.name,
+                                    tool_kwargs=cast(
+                                        Union[Dict[str, Any], str], tool_call.input
+                                    ),
+                                )
+                            )
+
                     yield AnthropicChatResponse(
                         message=ChatMessage(
                             role=role,
                             blocks=content,
-                            additional_kwargs={
-                                "tool_calls": [t.dict() for t in tool_calls_to_send]
-                            },
                         ),
                         citations=cur_citations,
                         delta=content_delta,
@@ -556,13 +571,31 @@ def gen() -> Generator[AnthropicChatResponse, None, None]:
                 content.append(cur_block)
                 cur_block = None

+            if cur_tool_call is not None:
+                tool_calls_to_send = [*cur_tool_calls, cur_tool_call]
+            else:
+                tool_calls_to_send = cur_tool_calls
+
+            for tool_call in tool_calls_to_send:
+                if tool_call.id not in [
+                    block.tool_call_id
+                    for block in content
+                    if isinstance(block, ToolCallBlock)
+                ]:
+                    content.append(
+                        ToolCallBlock(
+                            tool_call_id=tool_call.id,
+                            tool_name=tool_call.name,
+                            tool_kwargs=cast(
+                                Union[Dict[str, Any], str], tool_call.input
+                            ),
+                        )
+                    )
+
             yield AnthropicChatResponse(
                 message=ChatMessage(
                     role=role,
                     blocks=content,
-                    additional_kwargs={
-                        "tool_calls": [t.dict() for t in tool_calls_to_send]
-                    },
                 ),
                 citations=cur_citations,
                 delta=content_delta,
@@ -600,17 +633,12 @@ async def achat(
             **all_kwargs,
         )

-        blocks, tool_calls, citations = self._get_blocks_and_tool_calls_and_thinking(
-            response
-        )
+        blocks, citations = self._get_blocks_and_tool_calls_and_thinking(response)

         return AnthropicChatResponse(
             message=ChatMessage(
                 role=MessageRole.ASSISTANT,
                 blocks=blocks,
-                additional_kwargs={
-                    "tool_calls": tool_calls,
-                },
             ),
             citations=citations,
             raw=dict(response),
@@ -720,13 +748,26 @@ async def gen() -> ChatResponseAsyncGen:
                     else:
                         tool_calls_to_send = cur_tool_calls

+                    for tool_call in tool_calls_to_send:
+                        if tool_call.id not in [
+                            block.tool_call_id
+                            for block in content
+                            if isinstance(block, ToolCallBlock)
+                        ]:
+                            content.append(
+                                ToolCallBlock(
+                                    tool_call_id=tool_call.id,
+                                    tool_name=tool_call.name,
+                                    tool_kwargs=cast(
+                                        Union[Dict[str, Any], str], tool_call.input
+                                    ),
+                                )
+                            )
+
                     yield AnthropicChatResponse(
                         message=ChatMessage(
                             role=role,
                             blocks=content,
-                            additional_kwargs={
-                                "tool_calls": [t.dict() for t in tool_calls_to_send]
-                            },
                         ),
                         citations=cur_citations,
                         delta=content_delta,
@@ -744,13 +785,31 @@ async def gen() -> ChatResponseAsyncGen:
                 content.append(cur_block)
                 cur_block = None

+            if cur_tool_call is not None:
+                tool_calls_to_send = [*cur_tool_calls, cur_tool_call]
+            else:
+                tool_calls_to_send = cur_tool_calls
+
+            for tool_call in tool_calls_to_send:
+                if tool_call.id not in [
+                    block.tool_call_id
+                    for block in content
+                    if isinstance(block, ToolCallBlock)
+                ]:
+                    content.append(
+                        ToolCallBlock(
+                            tool_call_id=tool_call.id,
+                            tool_name=tool_call.name,
+                            tool_kwargs=cast(
+                                Union[Dict[str, Any], str], tool_call.input
+                            ),
+                        )
+                    )
+
             yield AnthropicChatResponse(
                 message=ChatMessage(
                     role=role,
                     blocks=content,
-                    additional_kwargs={
-                        "tool_calls": [t.dict() for t in tool_calls_to_send]
-                    },
                 ),
                 citations=cur_citations,
                 delta=content_delta,
@@ -859,7 +918,11 @@ def get_tool_calls_from_response(
         **kwargs: Any,
     ) -> List[ToolSelection]:
         """Predict and call the tool."""
-        tool_calls = response.message.additional_kwargs.get("tool_calls", [])
+        tool_calls = [
+            block
+            for block in response.message.blocks
+            if isinstance(block, ToolCallBlock)
+        ]

         if len(tool_calls) < 1:
             if error_on_no_tool_call:
@@ -871,24 +934,16 @@

         tool_selections = []
         for tool_call in tool_calls:
-            if (
-                "input" not in tool_call
-                or "id" not in tool_call
-                or "name" not in tool_call
-            ):
-                raise ValueError("Invalid tool call.")
-            if tool_call["type"] != "tool_use":
-                raise ValueError("Invalid tool type. Unsupported by Anthropic")
             argument_dict = (
-                json.loads(tool_call["input"])
-                if isinstance(tool_call["input"], str)
-                else tool_call["input"]
+                json.loads(tool_call.tool_kwargs)
+                if isinstance(tool_call.tool_kwargs, str)
+                else tool_call.tool_kwargs
             )

             tool_selections.append(
                 ToolSelection(
-                    tool_id=tool_call["id"],
-                    tool_name=tool_call["name"],
+                    tool_id=tool_call.tool_call_id or "",
+                    tool_name=tool_call.tool_name,
                     tool_kwargs=argument_dict,
                 )
             )
diff --git a/llama-index-integrations/llms/llama-index-llms-anthropic/llama_index/llms/anthropic/utils.py b/llama-index-integrations/llms/llama-index-llms-anthropic/llama_index/llms/anthropic/utils.py
index 91dc52a018..6b284fdd40 100644
--- a/llama-index-integrations/llms/llama-index-llms-anthropic/llama_index/llms/anthropic/utils.py
+++ b/llama-index-integrations/llms/llama-index-llms-anthropic/llama_index/llms/anthropic/utils.py
@@ -16,6 +16,7 @@
     CitationBlock,
     ThinkingBlock,
     ContentBlock,
+    ToolCallBlock,
 )

 from anthropic.types import (
@@ -24,6 +25,7 @@
     DocumentBlockParam,
     ThinkingBlockParam,
     ImageBlockParam,
+    ToolUseBlockParam,
     CacheControlEphemeralParam,
     Base64PDFSourceParam,
 )
@@ -269,6 +271,18 @@ def blocks_to_anthropic_blocks(
             if global_cache_control:
                 anthropic_blocks[-1]["cache_control"] = global_cache_control

+        elif isinstance(block, ToolCallBlock):
+            anthropic_blocks.append(
+                ToolUseBlockParam(
+                    id=block.tool_call_id or "",
+                    input=block.tool_kwargs,
+                    name=block.tool_name,
+                    type="tool_use",
+                )
+            )
+            if global_cache_control:
+                anthropic_blocks[-1]["cache_control"] = global_cache_control
+
         elif isinstance(block, CachePoint):
             if len(anthropic_blocks) > 0:
                 anthropic_blocks[-1]["cache_control"] = CacheControlEphemeralParam(
@@ -282,6 +296,7 @@
     else:
         raise ValueError(f"Unsupported block type: {type(block)}")

+    # keep this code for compatibility with older chat histories
     tool_calls = kwargs.get("tool_calls", [])
     for tool_call in tool_calls:
         assert "id" in tool_call
@@ -359,9 +374,15 @@ def messages_to_anthropic_messages(


 def force_single_tool_call(response: ChatResponse) -> None:
-    tool_calls = response.message.additional_kwargs.get("tool_calls", [])
+    tool_calls = [
+        block for block in response.message.blocks if isinstance(block, ToolCallBlock)
+    ]
     if len(tool_calls) > 1:
-        response.message.additional_kwargs["tool_calls"] = [tool_calls[0]]
+        response.message.blocks = [
+            block
+            for block in response.message.blocks
+            if not isinstance(block, ToolCallBlock)
+        ] + [tool_calls[0]]


 # Anthropic models that support prompt caching
diff --git a/llama-index-integrations/llms/llama-index-llms-anthropic/pyproject.toml b/llama-index-integrations/llms/llama-index-llms-anthropic/pyproject.toml
index 1f82e568d5..43f195b6ca 100644
--- a/llama-index-integrations/llms/llama-index-llms-anthropic/pyproject.toml
+++ b/llama-index-integrations/llms/llama-index-llms-anthropic/pyproject.toml
@@ -27,7 +27,7 @@ dev = [

 [project]
 name = "llama-index-llms-anthropic"
-version = "0.9.5"
+version = "0.10.0"
 description = "llama-index llms anthropic integration"
 authors = [{name = "Your Name", email = "you@example.com"}]
 requires-python = ">=3.9,<4.0"
@@ -35,7 +35,7 @@ readme = "README.md"
 license = "MIT"
 dependencies = [
     "anthropic[bedrock, vertex]>=0.69.0",
-    "llama-index-core>=0.14.3,<0.15",
+    "llama-index-core>=0.14.5,<0.15",
 ]

 [tool.codespell]
diff --git a/llama-index-integrations/llms/llama-index-llms-anthropic/tests/test_llms_anthropic.py b/llama-index-integrations/llms/llama-index-llms-anthropic/tests/test_llms_anthropic.py
index 4f37d7f98c..b4ecd3a317 100644
--- a/llama-index-integrations/llms/llama-index-llms-anthropic/tests/test_llms_anthropic.py
+++ b/llama-index-integrations/llms/llama-index-llms-anthropic/tests/test_llms_anthropic.py
@@ -8,7 +8,7 @@
 from pydantic import BaseModel
 from llama_index.core.prompts import PromptTemplate
 from llama_index.core.base.llms.base import BaseLLM
-from llama_index.core.llms import (
+from llama_index.core.base.llms.types import (
     ChatMessage,
     DocumentBlock,
     TextBlock,
@@ -16,6 +16,7 @@
     ChatResponse,
     CachePoint,
     CacheControl,
+    ToolCallBlock,
 )
 from llama_index.core.base.llms.types import ThinkingBlock
 from llama_index.core.tools import FunctionTool
@@ -244,7 +245,7 @@ def pdf_url() -> str:

 def test_tool_required():
     llm = Anthropic(model="claude-3-5-sonnet-latest")
-    search_tool = FunctionTool.from_defaults(fn=search)
+    search_tool = FunctionTool.from_defaults(fn=search, name="search")

     # Test with tool_required=True
     response = llm.chat_with_tools(
@@ -253,8 +254,21 @@
         tool_required=True,
     )
     assert isinstance(response, AnthropicChatResponse)
-    assert response.message.additional_kwargs["tool_calls"] is not None
-    assert len(response.message.additional_kwargs["tool_calls"]) > 0
+    assert (
+        len(
+            [
+                block
+                for block in response.message.blocks
+                if isinstance(block, ToolCallBlock)
+            ]
+        )
+        > 0
+    )
+    assert any(
+        block.tool_name == "search"
+        for block in response.message.blocks
+        if isinstance(block, ToolCallBlock)
+    )

     # Test with tool_required=False
     response = llm.chat_with_tools(
@@ -264,7 +278,16 @@
     )
     assert isinstance(response, AnthropicChatResponse)
     # Should not use tools for a simple greeting
-    assert not response.message.additional_kwargs.get("tool_calls")
+    assert (
+        len(
+            [
+                block
+                for block in response.message.blocks
+                if isinstance(block, ToolCallBlock)
+            ]
+        )
+        == 0
+    )

     # should not blow up with no tools (regression test)
     response = llm.chat_with_tools(
@@ -273,7 +296,16 @@
         tool_required=False,
     )
     assert isinstance(response, AnthropicChatResponse)
-    assert not response.message.additional_kwargs.get("tool_calls")
+    assert (
+        len(
+            [
+                block
+                for block in response.message.blocks
+                if isinstance(block, ToolCallBlock)
+            ]
+        )
+        == 0
+    )


 @pytest.mark.skipif(
diff --git a/llama-index-integrations/llms/llama-index-llms-anthropic/uv.lock b/llama-index-integrations/llms/llama-index-llms-anthropic/uv.lock
index 363940b1d5..c249a92c0e 100644
--- a/llama-index-integrations/llms/llama-index-llms-anthropic/uv.lock
+++ b/llama-index-integrations/llms/llama-index-llms-anthropic/uv.lock
@@ -1,5 +1,5 @@
 version = 1
-revision = 3
+revision = 2
 requires-python = ">=3.9, <4.0"
 resolution-markers = [
     "python_full_version >= '3.14'",
@@ -1865,7 +1865,7 @@ wheels = [

 [[package]]
 name = "llama-index-core"
-version = "0.14.3"
+version = "0.14.5"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "aiohttp" },
@@ -1901,9 +1901,9 @@ dependencies = [
     { name = "typing-inspect" },
     { name = "wrapt" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/c5/e4/6a4ab9465b66c9d31b74ed0221293aeebe9072ec9db3b3b229f96028af78/llama_index_core-0.14.3.tar.gz", hash = "sha256:ca8a473ac92fe54f2849175f6510655999852c83fa8b7d75fd3908a8863da05a", size = 11577791, upload-time = "2025-09-24T18:21:03.653Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/cf/42/e1de7d6a390dcd67b0754fd24e0d0acb56c1d0838a68e30671dd79fd5521/llama_index_core-0.14.5.tar.gz", hash = "sha256:913ebc3ad895d381eaab0f10dc405101c5bec5a70c09909ef2493ddc115f8552", size = 11578206, upload-time = "2025-10-15T19:10:09.746Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/b0/5a/de1002b10109a0dfa122ba84a3b640124cf2418a78e00ac0b382574f2b3f/llama_index_core-0.14.3-py3-none-any.whl", hash = "sha256:fc4291fbae8c6609e3367da39a85a453099476685d5a3e97b766d82d4bcce5a4", size = 11918952, upload-time = "2025-09-24T18:21:00.744Z" },
+    { url = "https://files.pythonhosted.org/packages/0f/64/c02576991efcefd30a65971e87ece7494d6bbf3739b7bffeeb56c86b5a76/llama_index_core-0.14.5-py3-none-any.whl", hash = "sha256:5445aa322b83a9d48baa608c3b920df4f434ed5d461a61e6bccb36d99348bddf", size = 11919461, upload-time = "2025-10-15T19:10:06.92Z" },
 ]

 [[package]]
@@ -1921,7 +1921,7 @@ wheels = [

 [[package]]
 name = "llama-index-llms-anthropic"
-version = "0.9.4"
+version = "0.10.0"
 source = { editable = "." }
 dependencies = [
     { name = "anthropic", extra = ["bedrock", "vertex"] },
@@ -1954,7 +1954,7 @@ dev = [
 [package.metadata]
 requires-dist = [
     { name = "anthropic", extras = ["bedrock", "vertex"], specifier = ">=0.69.0" },
-    { name = "llama-index-core", specifier = ">=0.14.3,<0.15" },
+    { name = "llama-index-core", specifier = ">=0.14.5,<0.15" },
 ]

 [package.metadata.requires-dev]