Skip to content

Commit aee73c4

Browse files
[MCP] Tiny Agents in Python (#3098)
* first draft of tiny agents in python * server types * nits * better typing * nit * refactor a bit * revert * add constants * Update src/huggingface_hub/inference/_mcp/utils.py Co-authored-by: Julien Chaumond <julien@huggingface.co> * Update src/huggingface_hub/inference/_mcp/tiny_agent.py Co-authored-by: Julien Chaumond <julien@huggingface.co> * style * rename and use get_token() helper * typer and load agents from the Hub * remove local agents * update default provider * remove unnecessary args * fixes * better exit? * fix --------- Co-authored-by: Julien Chaumond <julien@huggingface.co>
1 parent 10d4494 commit aee73c4

File tree

5 files changed

+359
-4
lines changed

5 files changed

+359
-4
lines changed

setup.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -66,6 +66,7 @@ def get_version() -> str:
6666

6767
extras["mcp"] = [
6868
"mcp>=1.8.0",
69+
"typer",
6970
] + extras["inference"]
7071

7172
extras["testing"] = (
@@ -130,7 +131,10 @@ def get_version() -> str:
130131
packages=find_packages("src"),
131132
extras_require=extras,
132133
entry_points={
133-
"console_scripts": ["huggingface-cli=huggingface_hub.commands.huggingface_cli:main"],
134+
"console_scripts": [
135+
"huggingface-cli=huggingface_hub.commands.huggingface_cli:main",
136+
"tiny-agents=huggingface_hub.inference._mcp.cli:app",
137+
],
134138
"fsspec.specs": "hf=huggingface_hub.HfFileSystem",
135139
},
136140
python_requires=">=3.8.0",
Lines changed: 71 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,71 @@
1+
from __future__ import annotations
2+
3+
import asyncio
4+
from typing import AsyncGenerator, Dict, Iterable, List, Optional, Union
5+
6+
from huggingface_hub import ChatCompletionInputMessage, ChatCompletionStreamOutput, MCPClient
7+
8+
from .._providers import PROVIDER_OR_POLICY_T
9+
from .constants import DEFAULT_SYSTEM_PROMPT, EXIT_LOOP_TOOLS, MAX_NUM_TURNS
10+
11+
12+
class Agent(MCPClient):
    """
    Python implementation of a Simple Agent
    i.e. just a basic while loop on top of an Inference Client with MCP-powered tools
    """

    def __init__(
        self,
        *,
        model: str,
        servers: Iterable[Dict],
        provider: Optional[PROVIDER_OR_POLICY_T] = None,
        api_key: Optional[str] = None,
        prompt: Optional[str] = None,
    ):
        super().__init__(model=model, provider=provider, api_key=api_key)
        # Raw server configurations; actual connections are opened in `load_tools`.
        self._servers_cfg = list(servers)
        # Conversation history, seeded with the system prompt.
        self.messages: List[Union[Dict, ChatCompletionInputMessage]] = [
            {"role": "system", "content": prompt or DEFAULT_SYSTEM_PROMPT}
        ]

    async def load_tools(self) -> None:
        """Connect to every configured MCP server and register its tools."""
        for server in self._servers_cfg:
            await self.add_mcp_server(server["type"], **server["config"])

    async def run(
        self,
        user_input: str,
        *,
        abort_event: Optional[asyncio.Event] = None,
    ) -> AsyncGenerator[Union[ChatCompletionStreamOutput, ChatCompletionInputMessage], None]:
        """
        Process `user_input`, yielding stream chunks and tool messages until the
        agent decides the turn is over (exit tool called, plain answer produced,
        or the turn budget `MAX_NUM_TURNS` is exhausted).
        """
        self.messages.append({"role": "user", "content": user_input})

        turn_count: int = 0
        expect_tool_call = True

        while True:
            # Cooperative cancellation: the caller may set the event mid-run.
            if abort_event is not None and abort_event.is_set():
                return

            stream = self.process_single_turn_with_tools(
                self.messages,
                exit_loop_tools=EXIT_LOOP_TOOLS,
                exit_if_first_chunk_no_tool=(turn_count > 0 and expect_tool_call),
            )
            async for event in stream:
                yield event

            turn_count += 1
            last = self.messages[-1]
            last_is_tool = last.get("role") == "tool"

            if last_is_tool:
                # The model invoked one of the explicit "exit loop" tools: stop here.
                if last.get("name") in {tool.function.name for tool in EXIT_LOOP_TOOLS}:
                    return
            elif turn_count > MAX_NUM_TURNS or expect_tool_call:
                # Plain assistant message: stop when the turn budget is exhausted,
                # or when a tool call was expected but none was produced.
                return

            expect_tool_call = not last_is_tool
Lines changed: 153 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,153 @@
1+
import asyncio
2+
import os
3+
import signal
4+
from functools import partial
5+
from typing import Any, Dict, List, Optional
6+
7+
import typer
8+
from rich import print
9+
10+
from .agent import Agent
11+
from .utils import _load_agent_config
12+
13+
14+
# Root Typer application exposed as the `tiny-agents` console-script entry point.
app = typer.Typer(
    rich_markup_mode="rich",
    help="A squad of lightweight composable AI applications built on Hugging Face's Inference Client and MCP stack.",
)

# `tiny-agents run` sub-application. `invoke_without_command=True` lets the
# callback below execute even when no further sub-command is supplied.
run_cli = typer.Typer(
    name="run",
    help="Run the Agent in the CLI",
    invoke_without_command=True,
)
app.add_typer(run_cli, name="run")
25+
26+
27+
async def _ainput(prompt: str = "» ") -> str:
    """Read one line of user input without blocking the event loop.

    `typer.prompt` is a blocking call, so it is dispatched to the default
    executor thread pool and awaited from there.
    """
    ask = partial(typer.prompt, prompt, prompt_suffix=" ")
    return await asyncio.get_running_loop().run_in_executor(None, ask)
30+
31+
32+
async def run_agent(
    agent_path: Optional[str],
) -> None:
    """
    Tiny Agent loop.

    Args:
        agent_path (`str`, *optional*):
            Path to a local folder containing an `agent.json` and optionally a custom `PROMPT.md` file or a built-in agent stored in a Hugging Face dataset.

    """
    config, prompt = _load_agent_config(agent_path)

    servers: List[Dict[str, Any]] = config.get("servers", [])

    abort_event = asyncio.Event()
    first_sigint = True

    loop = asyncio.get_running_loop()
    # Remember whatever handler was installed before we take over SIGINT, so we
    # can restore it on exit. May be SIG_DFL (falsy!) or None.
    original_sigint_handler = signal.getsignal(signal.SIGINT)

    def _sigint_handler() -> None:
        # First Ctrl+C aborts the in-flight turn; a second Ctrl+C hard-exits.
        nonlocal first_sigint
        if first_sigint:
            first_sigint = False
            abort_event.set()
            print("\n[red]Interrupted. Press Ctrl+C again to quit.[/red]", flush=True)
            return

        print("\n[red]Exiting...[/red]", flush=True)

        # os._exit: an executor thread may be blocked on stdin (typer.prompt),
        # so a clean shutdown is not possible; 130 = 128 + SIGINT by convention.
        os._exit(130)

    try:
        # NOTE(review): add_signal_handler is Unix-only; on Windows (Proactor
        # loop) it raises NotImplementedError — confirm intended platform support.
        loop.add_signal_handler(signal.SIGINT, _sigint_handler)

        async with Agent(
            provider=config["provider"],
            model=config["model"],
            servers=servers,
            prompt=prompt,
        ) as agent:
            await agent.load_tools()
            print(f"[bold blue]Agent loaded with {len(agent.available_tools)} tools:[/bold blue]")
            for t in agent.available_tools:
                print(f"[blue] • {t.function.name}[/blue]")

            while True:
                abort_event.clear()

                try:
                    user_input = await _ainput()
                    first_sigint = True
                except EOFError:
                    print("\n[red]EOF received, exiting.[/red]", flush=True)
                    break
                except KeyboardInterrupt:
                    # Interrupt that our SIGINT handler already converted into an
                    # abort: just re-prompt. Any other interrupt ends the session.
                    if not first_sigint and abort_event.is_set():
                        continue
                    else:
                        print("\n[red]Keyboard interrupt during input processing.[/red]", flush=True)
                        break

                try:
                    async for chunk in agent.run(user_input, abort_event=abort_event):
                        if abort_event.is_set() and not first_sigint:
                            break

                        # Stream chunks have `.choices`; tool result messages do not.
                        if hasattr(chunk, "choices"):
                            delta = chunk.choices[0].delta
                            if delta.content:
                                print(delta.content, end="", flush=True)
                            if delta.tool_calls:
                                for call in delta.tool_calls:
                                    if call.id:
                                        print(f"<Tool {call.id}>", end="")
                                    if call.function.name:
                                        print(f"{call.function.name}", end=" ")
                                    if call.function.arguments:
                                        print(f"{call.function.arguments}", end="")
                        else:
                            print(
                                f"\n\n[green]Tool[{chunk.name}] {chunk.tool_call_id}\n{chunk.content}[/green]\n",
                                flush=True,
                            )

                    print()

                except Exception as e:
                    print(f"\n[bold red]Error during agent run: {e}[/bold red]", flush=True)
                    first_sigint = True  # Allow graceful interrupt for the next command

    finally:
        # BUGFIX: the previous `if/elif` restored the original SIGINT handler only
        # when the loop was already closed, so on the normal exit path (loop still
        # open) the process-level handler was never reinstalled; it also skipped
        # restoration when the saved handler was the falsy SIG_DFL. Always detach
        # our asyncio handler, then reinstall whatever handler was active before.
        if not loop.is_closed():
            loop.remove_signal_handler(signal.SIGINT)
        if original_sigint_handler is not None:
            signal.signal(signal.SIGINT, original_sigint_handler)
129+
130+
131+
@run_cli.callback()
def run(
    path: Optional[str] = typer.Argument(
        None,
        help=(
            "Path to a local folder containing an agent.json file or a built-in agent "
            "stored in a Hugging Face dataset (default: "
            "https://huggingface.co/datasets/tiny-agents/tiny-agents)"
        ),
    ),
):
    """Run a Tiny Agent from the command line (blocking wrapper around `run_agent`)."""
    try:
        asyncio.run(run_agent(path))
    except KeyboardInterrupt:
        # Top-level Ctrl+C: exit with the conventional SIGINT status code.
        print("\n[red]Application terminated by KeyboardInterrupt.[/red]", flush=True)
        raise typer.Exit(code=130)
    except Exception as e:
        # Surface unexpected failures to the user, then re-raise for a traceback.
        print(f"\n[bold red]An unexpected error occurred: {e}[/bold red]", flush=True)
        raise e


if __name__ == "__main__":
    app()
Lines changed: 80 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,80 @@
1+
from __future__ import annotations
2+
3+
import sys
4+
from pathlib import Path
5+
from typing import List
6+
7+
from huggingface_hub import ChatCompletionInputTool
8+
9+
10+
# File names looked up inside an agent directory.
FILENAME_CONFIG = "agent.json"
FILENAME_PROMPT = "PROMPT.md"

# Fallback agent used when no agent path is given: a filesystem MCP server
# (scoped to ~/Desktop on macOS, the home directory elsewhere) plus Playwright.
DEFAULT_AGENT = {
    "model": "Qwen/Qwen2.5-72B-Instruct",
    "provider": "nebius",
    "servers": [
        {
            "type": "stdio",
            "config": {
                "command": "npx",
                "args": [
                    "-y",
                    "@modelcontextprotocol/server-filesystem",
                    str(Path.home() / ("Desktop" if sys.platform == "darwin" else "")),
                ],
            },
        },
        {
            "type": "stdio",
            "config": {
                "command": "npx",
                "args": ["@playwright/mcp@latest"],
            },
        },
    ],
}


# System prompt injected when the agent config does not provide its own.
DEFAULT_SYSTEM_PROMPT = """
You are an agent - please keep going until the user’s query is completely
resolved, before ending your turn and yielding back to the user. Only terminate
your turn when you are sure that the problem is solved, or if you need more
info from the user to solve the problem.
If you are not sure about anything pertaining to the user’s request, use your
tools to read files and gather the relevant information: do NOT guess or make
up an answer.
You MUST plan extensively before each function call, and reflect extensively
on the outcomes of the previous function calls. DO NOT do this entire process
by making function calls only, as this can impair your ability to solve the
problem and think insightfully.
""".strip()

# Safety valve: maximum number of assistant turns per user request.
MAX_NUM_TURNS = 10


def _parameterless_tool(name: str, description: str) -> ChatCompletionInputTool:
    """Build a function tool with an empty parameter schema (used as exit signals)."""
    return ChatCompletionInputTool.parse_obj(  # type: ignore[assignment]
        {
            "type": "function",
            "function": {
                "name": name,
                "description": description,
                "parameters": {"type": "object", "properties": {}},
            },
        }
    )


TASK_COMPLETE_TOOL: ChatCompletionInputTool = _parameterless_tool(
    "task_complete",
    "Call this tool when the task given by the user is complete",
)

ASK_QUESTION_TOOL: ChatCompletionInputTool = _parameterless_tool(
    "ask_question",
    "Ask the user for more info required to solve or clarify their problem.",
)

# Tools whose invocation terminates the agent loop.
EXIT_LOOP_TOOLS: List[ChatCompletionInputTool] = [TASK_COMPLETE_TOOL, ASK_QUESTION_TOOL]


# Hugging Face dataset hosting the built-in agent definitions.
DEFAULT_REPO_ID = "tiny-agents/tiny-agents"

src/huggingface_hub/inference/_mcp/utils.py

Lines changed: 50 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,17 @@
11
"""
2-
Utility functions for formatting results from mcp.CallToolResult.
2+
Utility functions for MCPClient and Tiny Agents.
33
4-
Taken from the JS SDK: https://github.com/huggingface/huggingface.js/blob/main/packages/mcp-client/src/ResultFormatter.ts.
4+
Formatting utilities taken from the JS SDK: https://github.com/huggingface/huggingface.js/blob/main/packages/mcp-client/src/ResultFormatter.ts.
55
"""
66

7-
from typing import TYPE_CHECKING, List
7+
import json
8+
from pathlib import Path
9+
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple
10+
11+
from huggingface_hub import snapshot_download
12+
from huggingface_hub.errors import EntryNotFoundError
13+
14+
from .constants import DEFAULT_AGENT, DEFAULT_REPO_ID, FILENAME_CONFIG, FILENAME_PROMPT
815

916

1017
if TYPE_CHECKING:
@@ -74,3 +81,43 @@ def _get_base64_size(base64_str: str) -> int:
7481
padding = 1
7582

7683
return (len(base64_str) * 3) // 4 - padding
84+
85+
86+
def _load_agent_config(agent_path: Optional[str]) -> Tuple[Dict[str, Any], Optional[str]]:
87+
"""Load server config and prompt."""
88+
89+
def _read_dir(directory: Path) -> Tuple[Dict[str, Any], Optional[str]]:
90+
cfg_file = directory / FILENAME_CONFIG
91+
if not cfg_file.exists():
92+
raise FileNotFoundError(f" Config file not found in {directory}! Please make sure it exists locally")
93+
94+
config: Dict[str, Any] = json.loads(cfg_file.read_text(encoding="utf-8"))
95+
prompt_file = directory / FILENAME_PROMPT
96+
prompt: Optional[str] = prompt_file.read_text(encoding="utf-8") if prompt_file.exists() else None
97+
return config, prompt
98+
99+
if agent_path is None:
100+
return DEFAULT_AGENT, None
101+
102+
path = Path(agent_path).expanduser()
103+
104+
if path.is_file():
105+
return json.loads(path.read_text(encoding="utf-8")), None
106+
107+
if path.is_dir():
108+
return _read_dir(path)
109+
110+
# fetch from the Hub
111+
try:
112+
repo_dir = Path(
113+
snapshot_download(
114+
repo_id=DEFAULT_REPO_ID,
115+
allow_patterns=f"{agent_path}/*",
116+
repo_type="dataset",
117+
)
118+
)
119+
return _read_dir(repo_dir / agent_path)
120+
except Exception as err:
121+
raise EntryNotFoundError(
122+
f" Agent {agent_path} not found in tiny-agents/tiny-agents! Please make sure it exists in https://huggingface.co/datasets/tiny-agents/tiny-agents."
123+
) from err

0 commit comments

Comments
 (0)