
Commit 26886cc

oskarhanestellasia authored and committed
Add tool calling to the LLM base class, implement in OpenAI
1 parent 85eaa5b commit 26886cc
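
The diff shown on this page contains only the new usage example; the base-class changes named in the commit message are not part of this file. As orientation, a minimal sketch of the interface the example exercises might look like the following. The method and type names (invoke_with_tools, ainvoke_with_tools, ToolCallResponse) are taken from the example below; the class layout and signatures are assumptions for illustration, not the actual neo4j_graphrag source.

# Hypothetical sketch of the tool-calling surface used by the example file below.
# Names come from the example; signatures and class layout are assumptions.
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional


@dataclass
class ToolCall:
    name: str                  # tool/function the model chose to call
    arguments: Dict[str, Any]  # parameters extracted by the model


@dataclass
class ToolCallResponse:
    tool_calls: List[ToolCall] = field(default_factory=list)
    content: Optional[str] = None  # free-text content returned alongside the calls


class LLMInterface(ABC):
    """Base class gaining tool-calling hooks (per the commit message)."""

    @abstractmethod
    def invoke_with_tools(
        self, input: str, tools: List[Dict[str, Any]]
    ) -> ToolCallResponse:
        """Send the prompt and tool definitions, return the model's tool calls."""

    @abstractmethod
    async def ainvoke_with_tools(
        self, input: str, tools: List[Dict[str, Any]]
    ) -> ToolCallResponse:
        """Async counterpart of invoke_with_tools."""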

File tree

1 file changed: 95 additions, 0 deletions

examples/tool_calls/openai_tool_calls.py (new file)
@@ -0,0 +1,95 @@
"""
Example showing how to use OpenAI tool calls with parameter extraction.
Both synchronous and asynchronous examples are provided.

To run this example:
1. Make sure you have the OpenAI API key in your .env file:
    OPENAI_API_KEY=your-api-key
2. Run: python examples/tool_calls/openai_tool_calls.py
"""

import asyncio
import json
import os
from typing import Dict, Any

from dotenv import load_dotenv

from neo4j_graphrag.llm import OpenAILLM
from neo4j_graphrag.llm.types import ToolCallResponse

# Load environment variables from .env file
load_dotenv()

# Define a tool for extracting information from text
TOOLS = [
    {
        "type": "function",
        "function": {
            "name": "extract_person_info",
            "description": "Extract information about a person from text",
            "parameters": {
                "type": "object",
                "properties": {
                    "name": {"type": "string", "description": "The person's full name"},
                    "age": {"type": "integer", "description": "The person's age"},
                    "occupation": {
                        "type": "string",
                        "description": "The person's occupation",
                    },
                },
                "required": ["name"],
            },
        },
    }
]


def process_tool_call(response: ToolCallResponse) -> Dict[str, Any]:
    """Process the tool call response and return the extracted parameters."""
    if not response.tool_calls:
        raise ValueError("No tool calls found in response")

    tool_call = response.tool_calls[0]
    print(f"\nTool called: {tool_call.name}")
    print(f"Arguments: {tool_call.arguments}")
    print(f"Additional content: {response.content or 'None'}")
    return tool_call.arguments


async def main() -> None:
    # Initialize the OpenAI LLM
    llm = OpenAILLM(
        api_key=os.getenv("OPENAI_API_KEY"),
        model_name="gpt-4o",
        model_params={"temperature": 0},
    )

    # Example text containing information about a person
    text = "Stella Hane is a 35-year-old software engineer who loves coding."

    print("\n=== Synchronous Tool Call ===")
    # Make a synchronous tool call
    sync_response = llm.invoke_with_tools(
        input=f"Extract information about the person from this text: {text}",
        tools=TOOLS,
    )
    sync_result = process_tool_call(sync_response)
    print("\n=== Synchronous Tool Call Result ===")
    print(json.dumps(sync_result, indent=2))

    print("\n=== Asynchronous Tool Call ===")
    # Make an asynchronous tool call with a different text
    text2 = "Molly Hane, 32, works as a data scientist and enjoys machine learning."
    async_response = await llm.ainvoke_with_tools(
        input=f"Extract information about the person from this text: {text2}",
        tools=TOOLS,
    )
    async_result = process_tool_call(async_response)
    print("\n=== Asynchronous Tool Call Result ===")
    print(json.dumps(async_result, indent=2))


if __name__ == "__main__":
    # Run the async main function
    asyncio.run(main())
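
Note that process_tool_call in the example reads only the first entry of response.tool_calls. If the model returns several tool calls in one response, a small variant can collect all of them; the helper below is a hypothetical sketch built only on the attributes the example already uses, not part of the committed file.

from typing import Any, Dict, List

from neo4j_graphrag.llm.types import ToolCallResponse


def process_all_tool_calls(response: ToolCallResponse) -> List[Dict[str, Any]]:
    """Collect the arguments of every tool call in the response (hypothetical helper)."""
    if not response.tool_calls:
        raise ValueError("No tool calls found in response")
    results: List[Dict[str, Any]] = []
    for tool_call in response.tool_calls:
        print(f"\nTool called: {tool_call.name}")
        print(f"Arguments: {tool_call.arguments}")
        results.append(tool_call.arguments)
    return results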
