From c920c15f350167e944a7d5342bedb61c32eafba5 Mon Sep 17 00:00:00 2001 From: Evan Mattson Date: Tue, 25 Mar 2025 16:24:27 +0900 Subject: [PATCH 1/4] agent as func --- ...hat_completion_agent_as_kernel_function.py | 102 ++++++++++++++++++ python/semantic_kernel/agents/agent.py | 32 +++++- .../functions/kernel_function_extension.py | 4 +- .../functions/kernel_plugin.py | 1 + python/uv.lock | 73 ++++++------- 5 files changed, 168 insertions(+), 44 deletions(-) create mode 100644 python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_as_kernel_function.py diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_as_kernel_function.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_as_kernel_function.py new file mode 100644 index 000000000000..6a7394ee14aa --- /dev/null +++ b/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_as_kernel_function.py @@ -0,0 +1,102 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from semantic_kernel import Kernel +from semantic_kernel.agents import ChatCompletionAgent, ChatHistoryAgentThread +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.filters import FunctionInvocationContext + +""" +Todo +""" + + +# Define the auto function invocation filter that will be used by the kernel +async def function_invocation_filter(context: FunctionInvocationContext, next): + """A filter that will be called for each function call in the response.""" + if "messages" not in context.arguments: + await next(context) + return + print(f" Agent {context.function.name} called with messages: {context.arguments['messages']}") + await next(context) + print(f" Response from agent {context.function.name}: {context.result.value}") + + +# Create and configure the kernel. 
+kernel = Kernel() +kernel.add_filter("function_invocation", function_invocation_filter) + +english_agent = ChatCompletionAgent( + service=AzureChatCompletion(), + name="EnglishAgent", + instructions="Your job is to help fulfill the user's request in English.", +) + +spanish_agent = ChatCompletionAgent( + service=AzureChatCompletion(), + name="SpanishAgent", + instructions="Your job is to help fulfill the user's request in Spanish.", +) + +french_agent = ChatCompletionAgent( + service=AzureChatCompletion(), + name="FrenchAgent", + instructions="Your job is to help fulfill the user's request in French.", +) + +router_agent = ChatCompletionAgent( + service=AzureChatCompletion(), + kernel=kernel, + name="Router", + instructions="This agent routes requests to the appropriate agent,", + plugins=[english_agent, spanish_agent, french_agent], +) + +thread: ChatHistoryAgentThread = None + + +async def chat() -> bool: + """ + Continuously prompt the user for input and show the assistant's response. + Type 'exit' to exit. + """ + try: + user_input = input("User:> ") + except (KeyboardInterrupt, EOFError): + print("\n\nExiting chat...") + return False + + if user_input.lower().strip() == "exit": + print("\n\nExiting chat...") + return False + + response = await router_agent.get_response( + messages=user_input, + thread=thread, + ) + + if response: + print(f"Agent :> {response}") + + return True + + +""" +Todo +""" + + +async def main() -> None: + print( + "Welcome to the chat bot!\n" + " Type 'exit' to exit.\n" + " Try to get some copy written by the copy writer, make sure to ask it is reviewed.)." 
+ ) + chatting = True + while chatting: + chatting = await chat() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/semantic_kernel/agents/agent.py b/python/semantic_kernel/agents/agent.py index efa8aa50c710..9b0f352c2e0f 100644 --- a/python/semantic_kernel/agents/agent.py +++ b/python/semantic_kernel/agents/agent.py @@ -4,7 +4,7 @@ import uuid from abc import ABC, abstractmethod from collections.abc import AsyncIterable, Awaitable, Callable, Iterable, Sequence -from typing import Any, ClassVar, Generic, TypeVar +from typing import Annotated, Any, ClassVar, Generic, TypeVar from pydantic import Field, model_validator @@ -13,6 +13,7 @@ from semantic_kernel.contents.streaming_chat_message_content import StreamingChatMessageContent from semantic_kernel.contents.utils.author_role import AuthorRole from semantic_kernel.exceptions.agent_exceptions import AgentExecutionException +from semantic_kernel.functions import kernel_function from semantic_kernel.functions.kernel_arguments import KernelArguments from semantic_kernel.functions.kernel_plugin import KernelPlugin from semantic_kernel.kernel import Kernel @@ -206,6 +207,35 @@ def _configure_plugins(cls, data: Any) -> Any: data["kernel"] = kernel return data + def model_post_init(self, __context: Any) -> None: + """Post initialization: create a kernel_function that calls this agent's get_response().""" + + @kernel_function(name=self.name, description=self.description) + async def _as_kernel_function( + messages: Annotated[str | list[str], "The user messages for the agent."], + instructions_override: Annotated[str | None, "Override agent instructions."] = None, + ) -> Annotated[str, "Agent response."]: + """A Minimal universal function for all agents. + + Exposes 'messages' and 'instructions_override'. + Internally, we pass them to get_response() for whichever agent is calling it. 
+ """ + # Convert a single string to a list if necessary + if isinstance(messages, str): + messages = [messages] + + # Forward to get_response(), passing the optional override + response_item = await self.get_response( + messages=messages, instructions_override=instructions_override if instructions_override else None + ) + + # Return the final .content + return response_item.content + + # To keep Pydantic happy, it needs to be marked with an underscore + # so it doesn't try to validate the function signature. + setattr(self, "_as_kernel_function", _as_kernel_function) + @abstractmethod def get_response(self, *args, **kwargs) -> Awaitable[AgentResponseItem[ChatMessageContent]]: """Get a response from the agent. diff --git a/python/semantic_kernel/functions/kernel_function_extension.py b/python/semantic_kernel/functions/kernel_function_extension.py index 7cc963f94e28..4304c187b1d0 100644 --- a/python/semantic_kernel/functions/kernel_function_extension.py +++ b/python/semantic_kernel/functions/kernel_function_extension.py @@ -85,7 +85,7 @@ def add_plugin( self.plugins[plugin.name] = plugin return self.plugins[plugin.name] if not plugin_name: - raise ValueError("plugin_name must be provided if a plugin is not supplied.") + plugin_name = getattr(plugin, "name", plugin.__class__.__name__) if not isinstance(plugin_name, str): raise TypeError("plugin_name must be a string.") if plugin: @@ -103,7 +103,7 @@ def add_plugin( return self.plugins[plugin_name] raise ValueError("plugin or parent_directory must be provided.") - def add_plugins(self, plugins: list[KernelPlugin] | dict[str, KernelPlugin | object]) -> None: + def add_plugins(self, plugins: list[KernelPlugin | object] | dict[str, KernelPlugin | object]) -> None: """Adds a list of plugins to the kernel's collection of plugins. 
Args: diff --git a/python/semantic_kernel/functions/kernel_plugin.py b/python/semantic_kernel/functions/kernel_plugin.py index c373de0b1238..dd95caf6190b 100644 --- a/python/semantic_kernel/functions/kernel_plugin.py +++ b/python/semantic_kernel/functions/kernel_plugin.py @@ -238,6 +238,7 @@ def from_object( candidates = plugin_instance.items() else: candidates = inspect.getmembers(plugin_instance, inspect.ismethod) + candidates.extend(inspect.getmembers(plugin_instance, inspect.isfunction)) # type: ignore # Read every method from the plugin instance functions = [ KernelFunctionFromMethod(method=candidate, plugin_name=plugin_name) diff --git a/python/uv.lock b/python/uv.lock index 598e172a9aed..ff236373b73c 100644 --- a/python/uv.lock +++ b/python/uv.lock @@ -1,4 +1,5 @@ version = 1 +revision = 1 requires-python = ">=3.10" resolution-markers = [ "python_full_version < '3.11' and sys_platform == 'darwin'", @@ -590,7 +591,7 @@ name = "build" version = "1.2.2.post1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "(os_name == 'nt' and sys_platform == 'darwin') or (os_name == 'nt' and sys_platform == 'linux') or (os_name == 'nt' and sys_platform == 'win32')" }, + { name = "colorama", marker = "os_name == 'nt' and sys_platform == 'win32'" }, { name = "importlib-metadata", marker = "(python_full_version < '3.10.2' and sys_platform == 'darwin') or (python_full_version < '3.10.2' and sys_platform == 'linux') or (python_full_version < '3.10.2' and sys_platform == 'win32')" }, { name = "packaging", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "pyproject-hooks", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -833,7 +834,7 @@ name = "click" version = "8.1.8" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "(platform_system == 'Windows' and sys_platform == 'darwin') or 
(platform_system == 'Windows' and sys_platform == 'linux') or (platform_system == 'Windows' and sys_platform == 'win32')" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } wheels = [ @@ -1595,6 +1596,7 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/24/8a/e2d52d4111d3ef48e0a8b2324e80b600f6c5339dae9827744b94325a0b6a/google_crc32c-1.7.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:6a40522958040051c755a173eb98c05ad4d64a6dd898888c3e5ccca2d1cbdcdc", size = 30149 }, { url = "https://files.pythonhosted.org/packages/da/60/8cd1605391da56c55db67ef64660e72b89787755a271ebe04f9418320b19/google_crc32c-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f714fe5cdf5007d7064c57cf7471a99e0cbafda24ddfa829117fc3baafa424f7", size = 32972 }, { url = "https://files.pythonhosted.org/packages/3d/65/065ffe1bb324709e7704205261b429588ed17bbde8b7940eb12b140a6082/google_crc32c-1.7.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f04e58dbe1bf0c9398e603a9be5aaa09e0ba7eb022a3293195d8749459a01069", size = 32616 }, + { url = "https://files.pythonhosted.org/packages/16/b8/95e6a189f5764abd643e3df5ab06e291d379f26ff77ea97bbcb12ac9207b/google_crc32c-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:e545b51ddf97f604d30114f7c23eecaf4c06cd6c023ff1ae0b80dcd99af32833", size = 33496 }, { url = "https://files.pythonhosted.org/packages/1b/30/51b10f995be9bd09551d050117c571f9749297717fcc2e7946e242eb4830/google_crc32c-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:364067b063664dd8d1fec75a3fe85edf05c46f688365269beccaf42ef5dfe889", size = 33080 }, { url = 
"https://files.pythonhosted.org/packages/4d/ec/a7ca773559c7ab6919ac0255d84c74571c8cecf0e8891036705f6861c04d/google_crc32c-1.7.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1b0d6044799f6ac51d1cc2decb997280a83c448b3bef517a54b57a3b71921c0", size = 32708 }, { url = "https://files.pythonhosted.org/packages/35/9e/0fca77ec4a5d4651e33662c62d44418cb1c37bd04b22f6368a0f7a7abefa/google_crc32c-1.7.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b8f48dddd1451026a517d7eb1f8c4ee2491998bfa383abb5fdebf32b0aa333e", size = 28080 }, @@ -2079,7 +2081,7 @@ name = "ipykernel" version = "6.29.5" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "appnope", marker = "(platform_system == 'Darwin' and sys_platform == 'darwin') or (platform_system == 'Darwin' and sys_platform == 'linux') or (platform_system == 'Darwin' and sys_platform == 'win32')" }, + { name = "appnope", marker = "sys_platform == 'darwin'" }, { name = "comm", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "debugpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "ipython", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -2972,7 +2974,6 @@ name = "nvidia-cublas-cu12" version = "12.4.5.8" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7f/7f/7fbae15a3982dc9595e49ce0f19332423b260045d0a6afe93cdbe2f1f624/nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0f8aa1706812e00b9f19dfe0cdb3999b092ccb8ca168c0db5b8ea712456fd9b3", size = 363333771 }, { url = "https://files.pythonhosted.org/packages/ae/71/1c91302526c45ab494c23f61c7a84aa568b8c1f9d196efa5993957faf906/nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_x86_64.whl", hash = 
"sha256:2fc8da60df463fdefa81e323eef2e36489e1c94335b5358bcb38360adf75ac9b", size = 363438805 }, ] @@ -2981,7 +2982,6 @@ name = "nvidia-cuda-cupti-cu12" version = "12.4.127" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/93/b5/9fb3d00386d3361b03874246190dfec7b206fd74e6e287b26a8fcb359d95/nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:79279b35cf6f91da114182a5ce1864997fd52294a87a16179ce275773799458a", size = 12354556 }, { url = "https://files.pythonhosted.org/packages/67/42/f4f60238e8194a3106d06a058d494b18e006c10bb2b915655bd9f6ea4cb1/nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:9dec60f5ac126f7bb551c055072b69d85392b13311fcc1bcda2202d172df30fb", size = 13813957 }, ] @@ -2990,7 +2990,6 @@ name = "nvidia-cuda-nvrtc-cu12" version = "12.4.127" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/aa/083b01c427e963ad0b314040565ea396f914349914c298556484f799e61b/nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0eedf14185e04b76aa05b1fea04133e59f465b6f960c0cbf4e37c3cb6b0ea198", size = 24133372 }, { url = "https://files.pythonhosted.org/packages/2c/14/91ae57cd4db3f9ef7aa99f4019cfa8d54cb4caa7e00975df6467e9725a9f/nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a178759ebb095827bd30ef56598ec182b85547f1508941a3d560eb7ea1fbf338", size = 24640306 }, ] @@ -2999,7 +2998,6 @@ name = "nvidia-cuda-runtime-cu12" version = "12.4.127" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/aa/b656d755f474e2084971e9a297def515938d56b466ab39624012070cb773/nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:961fe0e2e716a2a1d967aab7caee97512f71767f852f67432d572e36cb3a11f3", size = 894177 }, { url = 
"https://files.pythonhosted.org/packages/ea/27/1795d86fe88ef397885f2e580ac37628ed058a92ed2c39dc8eac3adf0619/nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:64403288fa2136ee8e467cdc9c9427e0434110899d07c779f25b5c068934faa5", size = 883737 }, ] @@ -3008,7 +3006,7 @@ name = "nvidia-cudnn-cu12" version = "9.1.0.70" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cublas-cu12", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nvidia-cublas-cu12", marker = "sys_platform == 'linux'" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/9f/fd/713452cd72343f682b1c7b9321e23829f00b842ceaedcda96e742ea0b0b3/nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f", size = 664752741 }, @@ -3019,10 +3017,9 @@ name = "nvidia-cufft-cu12" version = "11.2.1.3" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'linux'" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/8a/0e728f749baca3fbeffad762738276e5df60851958be7783af121a7221e7/nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:5dad8008fc7f92f5ddfa2101430917ce2ffacd86824914c82e28990ad7f00399", size = 211422548 }, { url = "https://files.pythonhosted.org/packages/27/94/3266821f65b92b3138631e9c8e7fe1fb513804ac934485a8d05776e1dd43/nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f083fc24912aa410be21fa16d157fed2055dab1cc4b6934a0e03cba69eb242b9", size = 211459117 }, ] @@ -3031,7 +3028,6 @@ name = "nvidia-curand-cu12" version = "10.3.5.147" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/80/9c/a79180e4d70995fdf030c6946991d0171555c6edf95c265c6b2bf7011112/nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1f173f09e3e3c76ab084aba0de819c49e56614feae5c12f69883f4ae9bb5fad9", size = 56314811 }, { url = "https://files.pythonhosted.org/packages/8a/6d/44ad094874c6f1b9c654f8ed939590bdc408349f137f9b98a3a23ccec411/nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a88f583d4e0bb643c49743469964103aa59f7f708d862c3ddb0fc07f851e3b8b", size = 56305206 }, ] @@ -3040,12 +3036,11 @@ name = "nvidia-cusolver-cu12" version = "11.6.1.9" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "nvidia-cublas-cu12", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "nvidia-cusparse-cu12", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nvidia-cublas-cu12", marker = "sys_platform == 'linux'" }, + { name = "nvidia-cusparse-cu12", marker = "sys_platform == 'linux'" }, + { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'linux'" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/46/6b/a5c33cf16af09166845345275c34ad2190944bcc6026797a39f8e0a282e0/nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_aarch64.whl", hash = "sha256:d338f155f174f90724bbde3758b7ac375a70ce8e706d70b018dd3375545fc84e", size = 127634111 }, { url = "https://files.pythonhosted.org/packages/3a/e1/5b9089a4b2a4790dfdea8b3a006052cfecff58139d5a4e34cb1a51df8d6f/nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_x86_64.whl", hash = "sha256:19e33fa442bcfd085b3086c4ebf7e8debc07cfe01e11513cc6d332fd918ac260", size = 127936057 }, ] @@ -3054,10 +3049,9 @@ name = "nvidia-cusparse-cu12" version = "12.3.1.170" source = { registry = 
"https://pypi.org/simple" } dependencies = [ - { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "nvidia-nvjitlink-cu12", marker = "sys_platform == 'linux'" }, ] wheels = [ - { url = "https://files.pythonhosted.org/packages/96/a9/c0d2f83a53d40a4a41be14cea6a0bf9e668ffcf8b004bd65633f433050c0/nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_aarch64.whl", hash = "sha256:9d32f62896231ebe0480efd8a7f702e143c98cfaa0e8a76df3386c1ba2b54df3", size = 207381987 }, { url = "https://files.pythonhosted.org/packages/db/f7/97a9ea26ed4bbbfc2d470994b8b4f338ef663be97b8f677519ac195e113d/nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ea4f11a2904e2a8dc4b1833cc1b5181cde564edd0d5cd33e3c168eff2d1863f1", size = 207454763 }, ] @@ -3066,7 +3060,6 @@ name = "nvidia-cusparselt-cu12" version = "0.6.2" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/8e/675498726c605c9441cf46653bd29cb1b8666da1fb1469ffa25f67f20c58/nvidia_cusparselt_cu12-0.6.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:067a7f6d03ea0d4841c85f0c6f1991c5dda98211f6302cb83a4ab234ee95bef8", size = 149422781 }, { url = "https://files.pythonhosted.org/packages/78/a8/bcbb63b53a4b1234feeafb65544ee55495e1bb37ec31b999b963cbccfd1d/nvidia_cusparselt_cu12-0.6.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:df2c24502fd76ebafe7457dbc4716b2fec071aabaed4fb7691a201cde03704d9", size = 150057751 }, ] @@ -3083,7 +3076,6 @@ name = "nvidia-nvjitlink-cu12" version = "12.4.127" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/02/45/239d52c05074898a80a900f49b1615d81c07fceadd5ad6c4f86a987c0bc4/nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:4abe7fef64914ccfa909bc2ba39739670ecc9e820c83ccc7a6ed414122599b83", size = 20552510 }, { url = 
"https://files.pythonhosted.org/packages/ff/ff/847841bacfbefc97a00036e0fce5a0f086b640756dc38caea5e1bb002655/nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:06b3b9b25bf3f8af351d664978ca26a16d2c5127dbd53c0497e28d1fb9611d57", size = 21066810 }, ] @@ -3092,7 +3084,6 @@ name = "nvidia-nvtx-cu12" version = "12.4.127" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/06/39/471f581edbb7804b39e8063d92fc8305bdc7a80ae5c07dbe6ea5c50d14a5/nvidia_nvtx_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7959ad635db13edf4fc65c06a6e9f9e55fc2f92596db928d169c0bb031e88ef3", size = 100417 }, { url = "https://files.pythonhosted.org/packages/87/20/199b8713428322a2f22b722c62b8cc278cc53dffa9705d744484b5035ee9/nvidia_nvtx_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:781e950d9b9f60d8241ccea575b32f5105a5baf4c2351cab5256a24869f12a1a", size = 99144 }, ] @@ -3156,8 +3147,8 @@ name = "onnxruntime-genai" version = "0.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "numpy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "onnxruntime", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, + { name = "numpy", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, + { name = "onnxruntime", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux') or (python_full_version < '3.13' and sys_platform == 'win32')" }, ] wheels = [ { url = "https://files.pythonhosted.org/packages/5f/7f/3e1edde3318458aabdd6070c44bedc2caa913949530d90ec89c32c76a036/onnxruntime_genai-0.6.0-cp310-cp310-macosx_12_0_arm64.whl", hash = 
"sha256:b820e20e438fc2679db24e432c5652e20a972709e4002210a46b4f6282fd57d4", size = 871347 }, @@ -3687,7 +3678,7 @@ name = "portalocker" version = "2.10.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pywin32", marker = "(platform_system == 'Windows' and sys_platform == 'darwin') or (platform_system == 'Windows' and sys_platform == 'linux') or (platform_system == 'Windows' and sys_platform == 'win32')" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/ed/d3/c6c64067759e87af98cc668c1cc75171347d0f1577fab7ca3749134e3cd4/portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f", size = 40891 } wheels = [ @@ -5146,7 +5137,6 @@ wheels = [ [[package]] name = "semantic-kernel" -version = "1.25.0" source = { editable = "." } dependencies = [ { name = "aiohttp", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, @@ -5208,7 +5198,7 @@ hugging-face = [ { name = "transformers", extra = ["torch"], marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] milvus = [ - { name = "milvus", marker = "(platform_system != 'Windows' and sys_platform == 'darwin') or (platform_system != 'Windows' and sys_platform == 'linux') or (platform_system != 'Windows' and sys_platform == 'win32')" }, + { name = "milvus", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" }, { name = "pymilvus", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] mistralai = [ @@ -5299,7 +5289,7 @@ requires-dist = [ { name = "google-generativeai", marker = "extra == 'google'", specifier = "~=0.8" }, { name = "ipykernel", marker = "extra == 'notebooks'", specifier = "~=6.29" }, { name = "jinja2", specifier = "~=3.1" }, - { name = "milvus", marker = "platform_system != 'Windows' and extra == 'milvus'", specifier = ">=2.3,<2.3.8" }, + { 
name = "milvus", marker = "sys_platform != 'win32' and extra == 'milvus'", specifier = ">=2.3,<2.3.8" }, { name = "mistralai", marker = "extra == 'mistralai'", specifier = ">=1.2,<2.0" }, { name = "motor", marker = "extra == 'mongo'", specifier = ">=3.3.2,<3.8.0" }, { name = "nest-asyncio", specifier = "~=1.6" }, @@ -5335,6 +5325,7 @@ requires-dist = [ { name = "websockets", specifier = ">=13,<16" }, { name = "websockets", marker = "extra == 'realtime'", specifier = ">=13,<16" }, ] +provides-extras = ["anthropic", "autogen", "aws", "azure", "chroma", "dapr", "faiss", "google", "hugging-face", "milvus", "mistralai", "mongo", "notebooks", "ollama", "onnx", "pandas", "pinecone", "postgres", "qdrant", "realtime", "redis", "usearch", "weaviate"] [package.metadata.requires-dev] dev = [ @@ -5736,22 +5727,22 @@ dependencies = [ { name = "fsspec", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "jinja2", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, { name = "networkx", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "nvidia-cublas-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, - { name = "nvidia-cuda-cupti-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, - { name = "nvidia-cuda-nvrtc-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and 
platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, - { name = "nvidia-cuda-runtime-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, - { name = "nvidia-cudnn-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, - { name = "nvidia-cufft-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, - { name = "nvidia-curand-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, - { name = "nvidia-cusolver-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, - { name = "nvidia-cusparse-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 
'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, - { name = "nvidia-cusparselt-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, - { name = "nvidia-nccl-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, - { name = "nvidia-nvjitlink-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, - { name = "nvidia-nvtx-cu12", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, + { name = "nvidia-cublas-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-cupti-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-nvrtc-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cuda-runtime-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cudnn-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cufft-cu12", marker = "platform_machine == 
'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-curand-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusolver-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusparse-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-cusparselt-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nccl-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nvjitlink-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, + { name = "nvidia-nvtx-cu12", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "setuptools", marker = "(python_full_version >= '3.12' and sys_platform == 'darwin') or (python_full_version >= '3.12' and sys_platform == 'linux') or (python_full_version >= '3.12' and sys_platform == 'win32')" }, { name = "sympy", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, - { name = "triton", marker = "(platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'darwin') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'linux') or (platform_machine == 'x86_64' and platform_system == 'Linux' and sys_platform == 'win32')" }, + { name = "triton", marker = "platform_machine == 'x86_64' and sys_platform == 'linux'" }, { name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux' or sys_platform == 'win32'" }, ] wheels = [ @@ -5796,7 +5787,7 @@ name = "tqdm" version = "4.67.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "(platform_system == 'Windows' and sys_platform == 'darwin') or (platform_system == 'Windows' and sys_platform == 'linux') or (platform_system == 'Windows' and sys_platform == 'win32')" }, + { 
name = "colorama", marker = "sys_platform == 'win32'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 } wheels = [ From 6c4b4575143a9d644746b70ffb5b518398b81aed Mon Sep 17 00:00:00 2001 From: Evan Mattson Date: Tue, 25 Mar 2025 19:52:01 +0900 Subject: [PATCH 2/4] Support any SK agent to be used as a kernel function. Adds samples. --- python/samples/concepts/README.md | 40 ++--- .../azure_ai_agent_as_kernel_function.py | 154 ++++++++++++++++++ ...hat_completion_agent_as_kernel_function.py | 91 ++++++++--- ..._completion_agent_function_termination.py} | 0 ...hat_completion_agent_prompt_templating.py} | 0 ...ent_summary_history_reducer_agent_chat.py} | 0 ...t_summary_history_reducer_single_agent.py} | 0 ...nt_truncate_history_reducer_agent_chat.py} | 0 ..._truncate_history_reducer_single_agent.py} | 0 python/semantic_kernel/agents/agent.py | 13 +- 10 files changed, 245 insertions(+), 53 deletions(-) create mode 100644 python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_as_kernel_function.py rename python/samples/concepts/agents/chat_completion_agent/{chat_completion_function_termination.py => chat_completion_agent_function_termination.py} (100%) rename python/samples/concepts/agents/chat_completion_agent/{chat_completion_prompt_templating.py => chat_completion_agent_prompt_templating.py} (100%) rename python/samples/concepts/agents/chat_completion_agent/{chat_completion_summary_history_reducer_agent_chat.py => chat_completion_agent_summary_history_reducer_agent_chat.py} (100%) rename python/samples/concepts/agents/chat_completion_agent/{chat_completion_summary_history_reducer_single_agent.py => chat_completion_agent_summary_history_reducer_single_agent.py} (100%) rename 
python/samples/concepts/agents/chat_completion_agent/{chat_completion_truncate_history_reducer_agent_chat.py => chat_completion_agent_truncate_history_reducer_agent_chat.py} (100%) rename python/samples/concepts/agents/chat_completion_agent/{chat_completion_truncate_history_reducer_single_agent.py => chat_completion_agent_truncate_history_reducer_single_agent.py} (100%) diff --git a/python/samples/concepts/README.md b/python/samples/concepts/README.md index f31c833413cb..2c3fbf8d6846 100644 --- a/python/samples/concepts/README.md +++ b/python/samples/concepts/README.md @@ -4,21 +4,9 @@ ### Agents - Creating and using [agents](../../semantic_kernel/agents/) in Semantic Kernel -#### [OpenAI Assistant Agent](../../semantic_kernel/agents/open_ai/open_ai_assistant_agent.py) - -- [OpenAI Assistant Chart Maker Streaming](./agents/openai_assistant/openai_assistant_chart_maker_streaming.py) -- [OpenAI Assistant Chart Maker](./agents/openai_assistant/openai_assistant_chart_maker.py) -- [OpenAI Assistant File Manipulation Streaming](./agents/openai_assistant/openai_assistant_file_manipulation_streaming.py) -- [OpenAI Assistant File Manipulation](./agents/openai_assistant/openai_assistant_file_manipulation.py) -- [OpenAI Assistant File Manipulation Streaming](./agents/openai_assistant/openai_assistant_file_manipulation_streaming.py) -- [OpenAI Assistant Retrieval](./agents/openai_assistant/openai_assistant_retrieval.py) -- [OpenAI Assistant Streaming](./agents/openai_assistant/openai_assistant_streaming.py) -- [OpenAI Assistant Structured Outputs](./agents/openai_assistant/openai_assistant_structured_outputs.py) -- [OpenAI Assistant Templating Streaming](./agents/openai_assistant/openai_assistant_templating_streaming.py) -- [OpenAI Assistant Vision Streaming](./agents/openai_assistant/openai_assistant_vision_streaming.py) - #### [Azure AI Agent](../../semantic_kernel/agents/azure_ai/azure_ai_agent.py) +- [Azure AI Agent as Kernel 
Function](./agents/azure_ai_agent/azure_ai_agent_as_kernel_function.py) - [Azure AI Agent with Azure AI Search](./agents/azure_ai_agent/azure_ai_agent_azure_ai_search.py) - [Azure AI Agent File Manipulation](./agents/azure_ai_agent/azure_ai_agent_file_manipulation.py) - [Azure AI Agent Streaming](./agents/azure_ai_agent/azure_ai_agent_streaming.py) @@ -37,12 +25,13 @@ #### [Chat Completion Agent](../../semantic_kernel/agents/chat_completion/chat_completion_agent.py) -- [Chat Completion Function Termination](./agents/chat_completion_agent/chat_completion_function_termination.py) -- [Chat Completion Templating](./agents/chat_completion_agent/chat_completion_prompt_templating.py) -- [Chat Completion Summary History Reducer Agent Chat](./agents/chat_completion_agent/chat_completion_summary_history_reducer_agent_chat.py) -- [Chat Completion Summary History Reducer Single Agent](./agents/chat_completion_agent/chat_completion_summary_history_reducer_single_agent.py) -- [Chat Completion Truncate History Reducer Agent Chat](./agents/chat_completion_agent/chat_completion_truncate_history_reducer_agent_chat.py) -- [Chat Completion Truncate History Reducer Single Agent](./agents/chat_completion_agent/chat_completion_truncate_history_reducer_single_agent.py) +- [Chat Completion Agent as Kernel Function](./agents/chat_completion_agent/chat_completion_agent_as_kernel_function.py) +- [Chat Completion Agent Function Termination](./agents/chat_completion_agent/chat_completion_agent_function_termination.py) +- [Chat Completion Agent Templating](./agents/chat_completion_agent/chat_completion_agent_prompt_templating.py) +- [Chat Completion Agent Summary History Reducer Agent Chat](./agents/chat_completion_agent/chat_completion_agent_summary_history_reducer_agent_chat.py) +- [Chat Completion Agent Summary History Reducer Single Agent](./agents/chat_completion_agent/chat_completion_agent_summary_history_reducer_single_agent.py) +- [Chat Completion Agent Truncate History Reducer Agent 
Chat](./agents/chat_completion_agent/chat_completion_agent_truncate_history_reducer_agent_chat.py) +- [Chat Completion Agent Truncate History Reducer Single Agent](./agents/chat_completion_agent/chat_completion_agent_truncate_history_reducer_single_agent.py) #### [Mixed Agent Group Chat](../../semantic_kernel/agents/group_chat/agent_group_chat.py) @@ -53,6 +42,19 @@ - [Mixed Chat Reset](./agents/mixed_chat/mixed_chat_reset.py) - [Mixed Chat Streaming](./agents/mixed_chat/mixed_chat_streaming.py) +#### [OpenAI Assistant Agent](../../semantic_kernel/agents/open_ai/open_ai_assistant_agent.py) + +- [OpenAI Assistant Chart Maker Streaming](./agents/openai_assistant/openai_assistant_chart_maker_streaming.py) +- [OpenAI Assistant Chart Maker](./agents/openai_assistant/openai_assistant_chart_maker.py) +- [OpenAI Assistant File Manipulation Streaming](./agents/openai_assistant/openai_assistant_file_manipulation_streaming.py) +- [OpenAI Assistant File Manipulation](./agents/openai_assistant/openai_assistant_file_manipulation.py) +- [OpenAI Assistant File Manipulation Streaming](./agents/openai_assistant/openai_assistant_file_manipulation_streaming.py) +- [OpenAI Assistant Retrieval](./agents/openai_assistant/openai_assistant_retrieval.py) +- [OpenAI Assistant Streaming](./agents/openai_assistant/openai_assistant_streaming.py) +- [OpenAI Assistant Structured Outputs](./agents/openai_assistant/openai_assistant_structured_outputs.py) +- [OpenAI Assistant Templating Streaming](./agents/openai_assistant/openai_assistant_templating_streaming.py) +- [OpenAI Assistant Vision Streaming](./agents/openai_assistant/openai_assistant_vision_streaming.py) + ### Audio - Using services that support audio-to-text and text-to-audio conversion - [Chat with Audio Input](./audio/01-chat_with_audio_input.py) diff --git a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_as_kernel_function.py b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_as_kernel_function.py new file 
mode 100644 index 000000000000..44126a9558b5 --- /dev/null +++ b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_as_kernel_function.py @@ -0,0 +1,154 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +from azure.identity.aio import DefaultAzureCredential + +from semantic_kernel import Kernel +from semantic_kernel.agents import ( + AzureAIAgent, + AzureAIAgentSettings, + ChatCompletionAgent, + ChatHistoryAgentThread, +) +from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion +from semantic_kernel.filters import FunctionInvocationContext + + +# Define the auto function invocation filter that will be used by the kernel +async def function_invocation_filter(context: FunctionInvocationContext, next): + """A filter that will be called for each function call in the response.""" + if "messages" not in context.arguments: + await next(context) + return + print(f" Agent [{context.function.name}] called with messages: {context.arguments['messages']}") + await next(context) + print(f" Response from agent [{context.function.name}]: {context.result.value}") + + +async def chat(triage_agent: ChatCompletionAgent, thread: ChatHistoryAgentThread = None) -> bool: + """ + Continuously prompt the user for input and show the assistant's response. + Type 'exit' to exit. + """ + try: + user_input = input("User:> ") + except (KeyboardInterrupt, EOFError): + print("\n\nExiting chat...") + return False + + if user_input.lower().strip() == "exit": + print("\n\nExiting chat...") + return False + + response = await triage_agent.get_response( + messages=user_input, + thread=thread, + ) + + if response: + print(f"Agent :> {response}") + + return True + + +async def main() -> None: + # Create and configure the kernel. + kernel = Kernel() + + # The filter is used for demonstration purposes to show the function invocation. 
+ kernel.add_filter("function_invocation", function_invocation_filter) + + ai_agent_settings = AzureAIAgentSettings.create() + + async with ( + DefaultAzureCredential() as creds, + AzureAIAgent.create_client( + credential=creds, + conn_str=ai_agent_settings.project_connection_string.get_secret_value(), + ) as client, + ): + # Create the agent definition + agent_definition = await client.agents.create_agent( + model=ai_agent_settings.model_deployment_name, + name="BillingAgent", + instructions=( + "You specialize in handling customer questions related to billing issues. " + "This includes clarifying invoice charges, payment methods, billing cycles, " + "explaining fees, addressing discrepancies in billed amounts, updating payment details, " + "assisting with subscription changes, and resolving payment failures. " + "Your goal is to clearly communicate and resolve issues specifically about payments and charges." + ), + ) + + # Create the AzureAI Agent + billing_agent = AzureAIAgent( + client=client, + definition=agent_definition, + ) + + refund_agent = ChatCompletionAgent( + service=AzureChatCompletion(), + name="RefundAgent", + instructions=( + "You specialize in addressing customer inquiries regarding refunds. " + "This includes evaluating eligibility for refunds, explaining refund policies, " + "processing refund requests, providing status updates on refunds, handling complaints related to " + "refunds, and guiding customers through the refund claim process. " + "Your goal is to assist users clearly and empathetically to successfully resolve their refund-related " + "concerns." + ), + ) + + triage_agent = ChatCompletionAgent( + service=AzureChatCompletion(), + kernel=kernel, + name="TriageAgent", + instructions=( + "Your role is to evaluate the user's request and forward it to the appropriate agent based on the " + "nature of the query. Forward requests about charges, billing cycles, payment methods, fees, or " + "payment issues to the BillingAgent. 
Forward requests concerning refunds, refund eligibility, " + "refund policies, or the status of refunds to the RefundAgent. Your goal is accurate identification " + "of the appropriate specialist to ensure the user receives targeted assistance." + ), + plugins=[billing_agent, refund_agent], + ) + + thread: ChatHistoryAgentThread = None + + print("Welcome to the chat bot!\n Type 'exit' to exit.\n Try to get some billing or refund help.") + + chatting = True + while chatting: + chatting = await chat(triage_agent, thread) + + """ + Sample Output: + + I canceled my subscription but I was still charged. + Agent [BillingAgent] called with messages: I canceled my subscription but I was still charged. + Response from agent [BillingAgent]: I understand how concerning that can be. It's possible that the charge you + received is for a billing cycle that was initiated before your cancellation was processed. Here are a few + steps you can take: + + 1. **Check Cancellation Confirmation**: Make sure you received a confirmation of your cancellation. + This usually comes via email. + + 2. **Billing Cycle**: Review your billing cycle to confirm whether the charge aligns with your subscription terms. + If your billing is monthly, charges can occur even if you cancel before the period ends. + + 3. **Contact Support**: If you believe the charge was made in error, please reach out to customer support for + further clarification and to rectify the situation. + + If you can provide more details about the subscription and when you canceled it, I can help you further understand + the charges. + + Agent :> It's possible that the charge you received is for a billing cycle initiated before your cancellation was + processed. Please check if you received a cancellation confirmation, review your billing cycle, and contact + support for further clarification if you believe the charge was made in error. If you have more details, + I can help you understand the charges better. 
+ """ + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_as_kernel_function.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_as_kernel_function.py index 6a7394ee14aa..f6dcd7641c94 100644 --- a/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_as_kernel_function.py +++ b/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_as_kernel_function.py @@ -18,39 +18,53 @@ async def function_invocation_filter(context: FunctionInvocationContext, next): if "messages" not in context.arguments: await next(context) return - print(f" Agent {context.function.name} called with messages: {context.arguments['messages']}") + print(f" Agent [{context.function.name}] called with messages: {context.arguments['messages']}") await next(context) - print(f" Response from agent {context.function.name}: {context.result.value}") + print(f" Response from agent [{context.function.name}]: {context.result.value}") # Create and configure the kernel. kernel = Kernel() -kernel.add_filter("function_invocation", function_invocation_filter) -english_agent = ChatCompletionAgent( - service=AzureChatCompletion(), - name="EnglishAgent", - instructions="Your job is to help fulfill the user's request in English.", -) +# The filter is used for demonstration purposes to show the function invocation. +kernel.add_filter("function_invocation", function_invocation_filter) -spanish_agent = ChatCompletionAgent( +billing_agent = ChatCompletionAgent( service=AzureChatCompletion(), - name="SpanishAgent", - instructions="Your job is to help fulfill the user's request in Spanish.", + name="BillingAgent", + instructions=( + "You specialize in handling customer questions related to billing issues. 
" + "This includes clarifying invoice charges, payment methods, billing cycles, " + "explaining fees, addressing discrepancies in billed amounts, updating payment details, " + "assisting with subscription changes, and resolving payment failures. " + "Your goal is to clearly communicate and resolve issues specifically about payments and charges." + ), ) -french_agent = ChatCompletionAgent( +refund_agent = ChatCompletionAgent( service=AzureChatCompletion(), - name="FrenchAgent", - instructions="Your job is to help fulfill the user's request in French.", + name="RefundAgent", + instructions=( + "You specialize in addressing customer inquiries regarding refunds. " + "This includes evaluating eligibility for refunds, explaining refund policies, " + "processing refund requests, providing status updates on refunds, handling complaints related to refunds, " + "and guiding customers through the refund claim process. " + "Your goal is to assist users clearly and empathetically to successfully resolve their refund-related concerns." + ), ) -router_agent = ChatCompletionAgent( +triage_agent = ChatCompletionAgent( service=AzureChatCompletion(), kernel=kernel, - name="Router", - instructions="This agent routes requests to the appropriate agent,", - plugins=[english_agent, spanish_agent, french_agent], + name="TriageAgent", + instructions=( + "Your role is to evaluate the user's request and forward it to the appropriate agent based on the nature of " + "the query. Forward requests about charges, billing cycles, payment methods, fees, or payment issues to the " + "BillingAgent. Forward requests concerning refunds, refund eligibility, refund policies, or the status of " + "refunds to the RefundAgent. Your goal is accurate identification of the appropriate specialist to ensure the " + "user receives targeted assistance." 
+ ), + plugins=[billing_agent, refund_agent], ) thread: ChatHistoryAgentThread = None @@ -71,7 +85,7 @@ async def chat() -> bool: print("\n\nExiting chat...") return False - response = await router_agent.get_response( + response = await triage_agent.get_response( messages=user_input, thread=thread, ) @@ -83,16 +97,43 @@ async def chat() -> bool: """ -Todo +Sample Output: + +User:> I was charged twice for my subscription last month, can I get one of those payments refunded? + Agent [BillingAgent] called with messages: I was charged twice for my subscription last month. + Agent [RefundAgent] called with messages: Can I get one of those payments refunded? + Response from agent [RefundAgent]: Of course, I'll be happy to help you with your refund inquiry. Could you please + provide a bit more detail about the specific payment you are referring to? For instance, the item or service + purchased, the transaction date, and the reason why you're seeking a refund? This will help me understand your + situation better and provide you with accurate guidance regarding our refund policy and process. + Response from agent [BillingAgent]: I'm sorry to hear about the duplicate charge. To resolve this issue, could + you please provide the following details: + +1. The date(s) of the transaction(s). +2. The last four digits of the card used for the transaction or any other payment method details. +3. The subscription plan you are on. + +Once I have this information, I can look into the charges and help facilitate a refund for the duplicate transaction. +Let me know if you have any questions in the meantime! + +Agent :> To address your concern about being charged twice and seeking a refund for one of those payments, please + provide the following information: + +1. **Duplicate Charge Details**: Please share the date(s) of the transaction(s), the last four digits of the card used + or details of any other payment method, and the subscription plan you are on.
This information will help us verify + the duplicate charge and assist you with a refund. + +2. **Refund Inquiry Details**: Please specify the transaction date, the item or service related to the payment you want + refunded, and the reason why you're seeking a refund. This will allow us to provide accurate guidance concerning + our refund policy and process. + +Once we have these details, we can proceed with resolving the duplicate charge and consider your refund request. If you +have any more questions, feel free to ask! """ async def main() -> None: - print( - "Welcome to the chat bot!\n" - " Type 'exit' to exit.\n" - " Try to get some copy written by the copy writer, make sure to ask it is reviewed.)." - ) + print("Welcome to the chat bot!\n Type 'exit' to exit.\n Try to get some billing or refund help.") chatting = True while chatting: chatting = await chat() diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_function_termination.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_function_termination.py similarity index 100% rename from python/samples/concepts/agents/chat_completion_agent/chat_completion_function_termination.py rename to python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_function_termination.py diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_prompt_templating.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_prompt_templating.py similarity index 100% rename from python/samples/concepts/agents/chat_completion_agent/chat_completion_prompt_templating.py rename to python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_prompt_templating.py diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_agent_chat.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_summary_history_reducer_agent_chat.py similarity 
index 100% rename from python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_agent_chat.py rename to python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_summary_history_reducer_agent_chat.py diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_single_agent.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_summary_history_reducer_single_agent.py similarity index 100% rename from python/samples/concepts/agents/chat_completion_agent/chat_completion_summary_history_reducer_single_agent.py rename to python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_summary_history_reducer_single_agent.py diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_agent_chat.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_truncate_history_reducer_agent_chat.py similarity index 100% rename from python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_agent_chat.py rename to python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_truncate_history_reducer_agent_chat.py diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_single_agent.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_truncate_history_reducer_single_agent.py similarity index 100% rename from python/samples/concepts/agents/chat_completion_agent/chat_completion_truncate_history_reducer_single_agent.py rename to python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_truncate_history_reducer_single_agent.py diff --git a/python/semantic_kernel/agents/agent.py b/python/semantic_kernel/agents/agent.py index 9b0f352c2e0f..cf43ca65ae2c 100644 --- a/python/semantic_kernel/agents/agent.py +++ b/python/semantic_kernel/agents/agent.py @@ 
-202,15 +202,14 @@ def _configure_plugins(cls, data: Any) -> Any: if not kernel: kernel = Kernel() for plugin in plugins: - name = Agent._get_plugin_name(plugin) - kernel.add_plugin(plugin, plugin_name=name) + kernel.add_plugin(plugin) data["kernel"] = kernel return data def model_post_init(self, __context: Any) -> None: """Post initialization: create a kernel_function that calls this agent's get_response().""" - @kernel_function(name=self.name, description=self.description) + @kernel_function(name=self.name, description=self.description or self.instructions) async def _as_kernel_function( messages: Annotated[str | list[str], "The user messages for the agent."], instructions_override: Annotated[str | None, "Override agent instructions."] = None, @@ -220,20 +219,16 @@ async def _as_kernel_function( Exposes 'messages' and 'instructions_override'. Internally, we pass them to get_response() for whichever agent is calling it. """ - # Convert a single string to a list if necessary if isinstance(messages, str): messages = [messages] - # Forward to get_response(), passing the optional override response_item = await self.get_response( messages=messages, instructions_override=instructions_override if instructions_override else None ) - - # Return the final .content return response_item.content - # To keep Pydantic happy, it needs to be marked with an underscore - # so it doesn't try to validate the function signature. + # Keep Pydantic happy with the "private" method, otherwise + # it will fail validating the model. 
setattr(self, "_as_kernel_function", _as_kernel_function) @abstractmethod From 0f63ac2e0b5c7962ccf49ff1cb29d318c94c3572 Mon Sep 17 00:00:00 2001 From: Evan Mattson Date: Wed, 26 Mar 2025 06:33:33 +0900 Subject: [PATCH 3/4] PR feedback --- .../azure_ai_agent_as_kernel_function.py | 8 ++++++++ .../azure_ai_agent/azure_ai_agent_streaming.py | 7 +++++++ .../chat_completion_agent_as_kernel_function.py | 6 +++++- python/semantic_kernel/agents/agent.py | 2 +- python/tests/unit/kernel/test_kernel.py | 16 +++++++++++++--- 5 files changed, 34 insertions(+), 5 deletions(-) diff --git a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_as_kernel_function.py b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_as_kernel_function.py index 44126a9558b5..4af74b9fab71 100644 --- a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_as_kernel_function.py +++ b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_as_kernel_function.py @@ -14,6 +14,14 @@ from semantic_kernel.connectors.ai.open_ai import AzureChatCompletion from semantic_kernel.filters import FunctionInvocationContext +""" +The following sample demonstrates how to create an Azure AI Agent +and a ChatCompletionAgent, and use them as tools available for a Triage Agent +to delegate requests to the appropriate agent. A Function Invocation Filter +is used to show the function call content and the function result content so the caller +can see which agent was called and what the response was.
+""" + # Define the auto function invocation filter that will be used by the kernel async def function_invocation_filter(context: FunctionInvocationContext, next): diff --git a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_streaming.py b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_streaming.py index 3b6d46f992a2..8f1aab490a4d 100644 --- a/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_streaming.py +++ b/python/samples/concepts/agents/azure_ai_agent/azure_ai_agent_streaming.py @@ -8,6 +8,13 @@ from semantic_kernel.agents import AzureAIAgent, AzureAIAgentSettings, AzureAIAgentThread from semantic_kernel.functions import kernel_function +""" +The following sample demonstrates how to create an Azure AI Agent +and use it with streaming responses. The agent is configured to use +a plugin that provides a list of specials from the menu and the price +of the requested menu item. +""" + # Define a sample plugin for the sample class MenuPlugin: diff --git a/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_as_kernel_function.py b/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_as_kernel_function.py index f6dcd7641c94..4bc3eb7d4feb 100644 --- a/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_as_kernel_function.py +++ b/python/samples/concepts/agents/chat_completion_agent/chat_completion_agent_as_kernel_function.py @@ -8,7 +8,11 @@ from semantic_kernel.filters import FunctionInvocationContext """ -Todo +The following sample demonstrates how to create Chat Completion Agents +and use them as tools available for a Triage Agent to delegate requests +to the appropriate agent. A Function Invocation Filter is used to show +the function call content and the function result content so the caller +can see which agent was called and what the response was. 
""" diff --git a/python/semantic_kernel/agents/agent.py b/python/semantic_kernel/agents/agent.py index cf43ca65ae2c..7d2bbbbcd8bf 100644 --- a/python/semantic_kernel/agents/agent.py +++ b/python/semantic_kernel/agents/agent.py @@ -213,7 +213,7 @@ def model_post_init(self, __context: Any) -> None: async def _as_kernel_function( messages: Annotated[str | list[str], "The user messages for the agent."], instructions_override: Annotated[str | None, "Override agent instructions."] = None, - ) -> Annotated[str, "Agent response."]: + ) -> Annotated[Any, "Agent response."]: """A Minimal universal function for all agents. Exposes 'messages' and 'instructions_override'. diff --git a/python/tests/unit/kernel/test_kernel.py b/python/tests/unit/kernel/test_kernel.py index c40c1f7153d9..f99df0696595 100644 --- a/python/tests/unit/kernel/test_kernel.py +++ b/python/tests/unit/kernel/test_kernel.py @@ -1,6 +1,7 @@ # Copyright (c) Microsoft. All rights reserved. import os +from dataclasses import dataclass from pathlib import Path from typing import Union from unittest.mock import AsyncMock, MagicMock, patch @@ -479,9 +480,18 @@ def test_plugin_no_plugin(kernel: Kernel): kernel.add_plugin(plugin_name="test") -def test_plugin_name_error(kernel: Kernel): - with pytest.raises(ValueError): - kernel.add_plugin(" ", None) +def test_plugin_name_from_class_name(kernel: Kernel): + kernel.add_plugin(" ", None) + assert "str" in kernel.plugins + + +def test_plugin_name_from_name_attribute(kernel: Kernel): + @dataclass + class TestPlugin: + name: str = "test_plugin" + + kernel.add_plugin(TestPlugin(), None) + assert "test_plugin" in kernel.plugins def test_plugin_name_not_string_error(kernel: Kernel): From fb25e9b45d32b7a14e8ebd1f1e0854ecbe354297 Mon Sep 17 00:00:00 2001 From: Evan Mattson Date: Wed, 26 Mar 2025 09:23:04 +0900 Subject: [PATCH 4/4] Fix mypy error --- python/semantic_kernel/agents/agent.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/python/semantic_kernel/agents/agent.py b/python/semantic_kernel/agents/agent.py index 7d2bbbbcd8bf..4ba0e3ac587a 100644 --- a/python/semantic_kernel/agents/agent.py +++ b/python/semantic_kernel/agents/agent.py @@ -223,7 +223,8 @@ async def _as_kernel_function( messages = [messages] response_item = await self.get_response( - messages=messages, instructions_override=instructions_override if instructions_override else None + messages=messages, # type: ignore + instructions_override=instructions_override if instructions_override else None, ) return response_item.content