diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props
index 7878b0d5b359..94e6c8de85ef 100644
--- a/dotnet/Directory.Packages.props
+++ b/dotnet/Directory.Packages.props
@@ -93,7 +93,6 @@
-
@@ -109,7 +108,7 @@
-
+
diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs
index a0aa892a7802..be69fe412d5e 100644
--- a/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs
+++ b/dotnet/samples/Concepts/Agents/ChatCompletion_ServiceSelection.cs
@@ -109,11 +109,11 @@ private Kernel CreateKernelWithTwoServices(bool useChatClient)
{
builder.Services.AddKeyedChatClient(
ServiceKeyBad,
- new OpenAI.OpenAIClient("bad-key").AsChatClient(TestConfiguration.OpenAI.ChatModelId));
+ new OpenAI.OpenAIClient("bad-key").GetChatClient(TestConfiguration.OpenAI.ChatModelId).AsIChatClient());
builder.Services.AddKeyedChatClient(
ServiceKeyGood,
- new OpenAI.OpenAIClient(TestConfiguration.OpenAI.ApiKey).AsChatClient(TestConfiguration.OpenAI.ChatModelId));
+ new OpenAI.OpenAIClient(TestConfiguration.OpenAI.ApiKey).GetChatClient(TestConfiguration.OpenAI.ChatModelId).AsIChatClient());
}
else
{
@@ -122,14 +122,16 @@ private Kernel CreateKernelWithTwoServices(bool useChatClient)
new Azure.AI.OpenAI.AzureOpenAIClient(
new Uri(TestConfiguration.AzureOpenAI.Endpoint),
new Azure.AzureKeyCredential("bad-key"))
- .AsChatClient(TestConfiguration.AzureOpenAI.ChatDeploymentName));
+ .GetChatClient(TestConfiguration.AzureOpenAI.ChatDeploymentName)
+ .AsIChatClient());
builder.Services.AddKeyedChatClient(
ServiceKeyGood,
new Azure.AI.OpenAI.AzureOpenAIClient(
new Uri(TestConfiguration.AzureOpenAI.Endpoint),
new Azure.AzureKeyCredential(TestConfiguration.AzureOpenAI.ApiKey))
- .AsChatClient(TestConfiguration.AzureOpenAI.ChatDeploymentName));
+ .GetChatClient(TestConfiguration.AzureOpenAI.ChatDeploymentName)
+ .AsIChatClient());
}
}
else
diff --git a/dotnet/samples/Concepts/Filtering/ChatClient_AutoFunctionInvocationFiltering.cs b/dotnet/samples/Concepts/Filtering/ChatClient_AutoFunctionInvocationFiltering.cs
new file mode 100644
index 000000000000..1e053618a385
--- /dev/null
+++ b/dotnet/samples/Concepts/Filtering/ChatClient_AutoFunctionInvocationFiltering.cs
@@ -0,0 +1,167 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+
+namespace Filtering;
+
+public class ChatClient_AutoFunctionInvocationFiltering(ITestOutputHelper output) : BaseTest(output)
+{
+ ///
+ /// Shows how to use .
+ ///
+ [Fact]
+ public async Task UsingAutoFunctionInvocationFilter()
+ {
+ var builder = Kernel.CreateBuilder();
+
+ builder.AddOpenAIChatClient("gpt-4", TestConfiguration.OpenAI.ApiKey);
+
+ // This filter outputs information about auto function invocation and returns overridden result.
+ builder.Services.AddSingleton(new AutoFunctionInvocationFilter(this.Output));
+
+ var kernel = builder.Build();
+
+ var function = KernelFunctionFactory.CreateFromMethod(() => "Result from function", "MyFunction");
+
+ kernel.ImportPluginFromFunctions("MyPlugin", [function]);
+
+ var executionSettings = new OpenAIPromptExecutionSettings
+ {
+ FunctionChoiceBehavior = FunctionChoiceBehavior.Required([function], autoInvoke: true)
+ };
+
+ var result = await kernel.InvokePromptAsync("Invoke provided function and return result", new(executionSettings));
+
+ Console.WriteLine(result);
+
+ // Output:
+ // Request sequence index: 0
+ // Function sequence index: 0
+ // Total number of functions: 1
+ // Result from auto function invocation filter.
+ }
+
+ ///
+ /// Shows how to get list of function calls by using .
+ ///
+ [Fact]
+ public async Task GetFunctionCallsWithFilterAsync()
+ {
+ var builder = Kernel.CreateBuilder();
+
+ builder.AddOpenAIChatCompletion("gpt-3.5-turbo-1106", TestConfiguration.OpenAI.ApiKey);
+
+ builder.Services.AddSingleton(new FunctionCallsFilter(this.Output));
+
+ var kernel = builder.Build();
+
+ kernel.ImportPluginFromFunctions("HelperFunctions",
+ [
+ kernel.CreateFunctionFromMethod(() => DateTime.UtcNow.ToString("R"), "GetCurrentUtcTime", "Retrieves the current time in UTC."),
+ kernel.CreateFunctionFromMethod((string cityName) =>
+ cityName switch
+ {
+ "Boston" => "61 and rainy",
+ "London" => "55 and cloudy",
+ "Miami" => "80 and sunny",
+ "Paris" => "60 and rainy",
+ "Tokyo" => "50 and sunny",
+ "Sydney" => "75 and sunny",
+ "Tel Aviv" => "80 and sunny",
+ _ => "31 and snowing",
+ }, "GetWeatherForCity", "Gets the current weather for the specified city"),
+ ]);
+
+ var executionSettings = new OpenAIPromptExecutionSettings
+ {
+ FunctionChoiceBehavior = FunctionChoiceBehavior.Auto()
+ };
+
+ await foreach (var chunk in kernel.InvokePromptStreamingAsync("Check current UTC time and return current weather in Boston city.", new(executionSettings)))
+ {
+ Console.WriteLine(chunk.ToString());
+ }
+
+ // Output:
+ // Request #0. Function call: HelperFunctions.GetCurrentUtcTime.
+ // Request #0. Function call: HelperFunctions.GetWeatherForCity.
+ // The current UTC time is {time of execution}, and the current weather in Boston is 61°F and rainy.
+ }
+
+ /// Shows available syntax for auto function invocation filter.
+ private sealed class AutoFunctionInvocationFilter(ITestOutputHelper output) : IAutoFunctionInvocationFilter
+ {
+ public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next)
+ {
+ // Example: get function information
+ var functionName = context.Function.Name;
+
+ // Example: get chat history
+ var chatHistory = context.ChatHistory;
+
+ // Example: get information about all functions which will be invoked
+ var functionCalls = FunctionCallContent.GetFunctionCalls(context.ChatHistory.Last());
+
+ // In function calling functionality there are two loops.
+ // Outer loop is "request" loop - it performs multiple requests to the LLM until the user's request is satisfied.
+ // Inner loop is "function" loop - it handles LLM response with multiple function calls.
+
+ // Workflow example:
+ // 1. Request to LLM #1 -> Response with 3 functions to call.
+ // 1.1. Function #1 called.
+ // 1.2. Function #2 called.
+ // 1.3. Function #3 called.
+ // 2. Request to LLM #2 -> Response with 2 functions to call.
+ // 2.1. Function #1 called.
+ // 2.2. Function #2 called.
+
+ // context.RequestSequenceIndex - it's a sequence number of outer/request loop operation.
+ // context.FunctionSequenceIndex - it's a sequence number of inner/function loop operation.
+ // context.FunctionCount - number of functions which will be called per request (based on example above: 3 for first request, 2 for second request).
+
+ // Example: get request sequence index
+ output.WriteLine($"Request sequence index: {context.RequestSequenceIndex}");
+
+ // Example: get function sequence index
+ output.WriteLine($"Function sequence index: {context.FunctionSequenceIndex}");
+
+ // Example: get total number of functions which will be called
+ output.WriteLine($"Total number of functions: {context.FunctionCount}");
+
+ // Calling next filter in pipeline or function itself.
+ // By skipping this call, next filters and function won't be invoked, and function call loop will proceed to the next function.
+ await next(context);
+
+ // Example: get function result
+ var result = context.Result;
+
+ // Example: override function result value
+ context.Result = new FunctionResult(context.Result, "Result from auto function invocation filter");
+
+ // Example: Terminate function invocation
+ context.Terminate = true;
+ }
+ }
+
+ /// Shows how to get list of all function calls per request.
+ private sealed class FunctionCallsFilter(ITestOutputHelper output) : IAutoFunctionInvocationFilter
+ {
+ public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next)
+ {
+ var chatHistory = context.ChatHistory;
+ var functionCalls = FunctionCallContent.GetFunctionCalls(chatHistory.Last()).ToArray();
+
+ if (functionCalls is { Length: > 0 })
+ {
+ foreach (var functionCall in functionCalls)
+ {
+ output.WriteLine($"Request #{context.RequestSequenceIndex}. Function call: {functionCall.PluginName}.{functionCall.FunctionName}.");
+ }
+ }
+
+ await next(context);
+ }
+ }
+}
diff --git a/dotnet/samples/Concepts/Kernel/CustomAIServiceSelector.cs b/dotnet/samples/Concepts/Kernel/CustomAIServiceSelector.cs
index d4631323c24d..02ddbdb3ec35 100644
--- a/dotnet/samples/Concepts/Kernel/CustomAIServiceSelector.cs
+++ b/dotnet/samples/Concepts/Kernel/CustomAIServiceSelector.cs
@@ -10,7 +10,7 @@
namespace KernelExamples;
///
-/// This sample shows how to use a custom AI service selector to select a specific model by matching it's id.
+/// This sample shows how to use a custom AI service selector to select a specific model by matching the model id.
///
public class CustomAIServiceSelector(ITestOutputHelper output) : BaseTest(output)
{
@@ -39,7 +39,8 @@ public async Task UsingCustomSelectToSelectServiceByMatchingModelId()
builder.Services
.AddSingleton(customSelector)
.AddKeyedChatClient("OpenAIChatClient", new OpenAI.OpenAIClient(TestConfiguration.OpenAI.ApiKey)
- .AsChatClient("gpt-4o")); // Add a IChatClient to the kernel
+ .GetChatClient("gpt-4o")
+ .AsIChatClient()); // Add a IChatClient to the kernel
Kernel kernel = builder.Build();
@@ -60,7 +61,6 @@ private sealed class GptAIServiceSelector(string modelNameStartsWith, ITestOutpu
private readonly ITestOutputHelper _output = output;
private readonly string _modelNameStartsWith = modelNameStartsWith;
- ///
private bool TrySelect(
Kernel kernel, KernelFunction function, KernelArguments arguments,
[NotNullWhen(true)] out T? service, out PromptExecutionSettings? serviceSettings) where T : class
@@ -78,7 +78,7 @@ private bool TrySelect(
else if (serviceToCheck is IChatClient chatClient)
{
var metadata = chatClient.GetService();
- serviceModelId = metadata?.ModelId;
+ serviceModelId = metadata?.DefaultModelId;
endpoint = metadata?.ProviderUri?.ToString();
}
diff --git a/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs b/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
index 8935e4d66d48..39106b957841 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
@@ -43,25 +43,25 @@ public async Task UseDependencyInjectionToCreateAgentAsync(bool useChatClient)
IChatClient chatClient;
if (this.UseOpenAIConfig)
{
- chatClient = new Microsoft.Extensions.AI.OpenAIChatClient(
- new OpenAI.OpenAIClient(TestConfiguration.OpenAI.ApiKey),
- TestConfiguration.OpenAI.ChatModelId);
+ chatClient = new OpenAI.OpenAIClient(TestConfiguration.OpenAI.ApiKey)
+ .GetChatClient(TestConfiguration.OpenAI.ChatModelId)
+ .AsIChatClient();
}
else if (!string.IsNullOrEmpty(this.ApiKey))
{
- chatClient = new Microsoft.Extensions.AI.OpenAIChatClient(
- openAIClient: new AzureOpenAIClient(
+ chatClient = new AzureOpenAIClient(
endpoint: new Uri(TestConfiguration.AzureOpenAI.Endpoint),
- credential: new ApiKeyCredential(TestConfiguration.AzureOpenAI.ApiKey)),
- modelId: TestConfiguration.AzureOpenAI.ChatModelId);
+ credential: new ApiKeyCredential(TestConfiguration.AzureOpenAI.ApiKey))
+ .GetChatClient(TestConfiguration.AzureOpenAI.ChatModelId)
+ .AsIChatClient();
}
else
{
- chatClient = new Microsoft.Extensions.AI.OpenAIChatClient(
- openAIClient: new AzureOpenAIClient(
+ chatClient = new AzureOpenAIClient(
endpoint: new Uri(TestConfiguration.AzureOpenAI.Endpoint),
- credential: new AzureCliCredential()),
- modelId: TestConfiguration.AzureOpenAI.ChatModelId);
+ credential: new AzureCliCredential())
+ .GetChatClient(TestConfiguration.AzureOpenAI.ChatModelId)
+ .AsIChatClient();
}
var functionCallingChatClient = chatClient!.AsKernelFunctionInvokingChatClient();
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Connectors.OpenAI.UnitTests.csproj b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Connectors.OpenAI.UnitTests.csproj
index 57f983dd5c9b..04d35b9e6561 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Connectors.OpenAI.UnitTests.csproj
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Connectors.OpenAI.UnitTests.csproj
@@ -96,6 +96,15 @@
Always
+
+ Always
+
+
+ Always
+
+
+ Always
+
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterChatClientTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterChatClientTests.cs
new file mode 100644
index 000000000000..dd8d94c99824
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterChatClientTests.cs
@@ -0,0 +1,793 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Net;
+using System.Net.Http;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Connectors.OpenAI.UnitTests.Core;
+
+public sealed class AutoFunctionInvocationFilterChatClientTests : IDisposable
+{
+ private readonly MultipleHttpMessageHandlerStub _messageHandlerStub;
+ private readonly HttpClient _httpClient;
+
+ public AutoFunctionInvocationFilterChatClientTests()
+ {
+ this._messageHandlerStub = new MultipleHttpMessageHandlerStub();
+
+ this._httpClient = new HttpClient(this._messageHandlerStub, false);
+ }
+
+ [Fact]
+ public async Task FiltersAreExecutedCorrectlyAsync()
+ {
+ // Arrange
+ int filterInvocations = 0;
+ int functionInvocations = 0;
+ int[] expectedRequestSequenceNumbers = [0, 0, 1, 1];
+ int[] expectedFunctionSequenceNumbers = [0, 1, 0, 1];
+ List requestSequenceNumbers = [];
+ List functionSequenceNumbers = [];
+ Kernel? contextKernel = null;
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ contextKernel = context.Kernel;
+
+ if (context.ChatHistory.Last() is OpenAIChatMessageContent content)
+ {
+ Assert.Equal(2, content.ToolCalls.Count);
+ }
+
+ requestSequenceNumbers.Add(context.RequestSequenceIndex);
+ functionSequenceNumbers.Add(context.FunctionSequenceIndex);
+
+ await next(context);
+
+ filterInvocations++;
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Test prompt", new(new OpenAIPromptExecutionSettings
+ {
+ FunctionChoiceBehavior = FunctionChoiceBehavior.Auto()
+ }));
+
+ // Assert
+ Assert.Equal(4, filterInvocations);
+ Assert.Equal(4, functionInvocations);
+ Assert.Equal(expectedRequestSequenceNumbers, requestSequenceNumbers);
+ Assert.Equal(expectedFunctionSequenceNumbers, functionSequenceNumbers);
+ Assert.Same(kernel, contextKernel);
+ Assert.Equal("Test chat response", result.ToString());
+ }
+
+ [Fact]
+ public async Task FunctionSequenceIndexIsCorrectForConcurrentCallsAsync()
+ {
+ // Arrange
+ List functionSequenceNumbers = [];
+ List expectedFunctionSequenceNumbers = [0, 1, 0, 1];
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ functionSequenceNumbers.Add(context.FunctionSequenceIndex);
+
+ await next(context);
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Test prompt", new(new OpenAIPromptExecutionSettings
+ {
+ FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(options: new()
+ {
+ AllowParallelCalls = true,
+ AllowConcurrentInvocation = true
+ })
+ }));
+
+ // Assert
+ Assert.Equal(expectedFunctionSequenceNumbers, functionSequenceNumbers);
+ }
+
+ [Fact]
+ public async Task FiltersAreExecutedCorrectlyOnStreamingAsync()
+ {
+ // Arrange
+ int filterInvocations = 0;
+ int functionInvocations = 0;
+ List requestSequenceNumbers = [];
+ List functionSequenceNumbers = [];
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { functionInvocations++; return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ if (context.ChatHistory.Last() is OpenAIChatMessageContent content)
+ {
+ Assert.Equal(2, content.ToolCalls.Count);
+ }
+
+ requestSequenceNumbers.Add(context.RequestSequenceIndex);
+ functionSequenceNumbers.Add(context.FunctionSequenceIndex);
+
+ await next(context);
+
+ filterInvocations++;
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
+
+ var executionSettings = new OpenAIPromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() };
+
+ // Act
+ await foreach (var item in kernel.InvokePromptStreamingAsync("Test prompt", new(executionSettings)))
+ { }
+
+ // Assert
+ Assert.Equal(4, filterInvocations);
+ Assert.Equal(4, functionInvocations);
+ Assert.Equal([0, 0, 1, 1], requestSequenceNumbers);
+ Assert.Equal([0, 1, 0, 1], functionSequenceNumbers);
+ }
+
+ [Fact]
+ public async Task DifferentWaysOfAddingFiltersWorkCorrectlyAsync()
+ {
+ // Arrange
+ var executionOrder = new List();
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var filter1 = new AutoFunctionInvocationFilter(async (context, next) =>
+ {
+ executionOrder.Add("Filter1-Invoking");
+ await next(context);
+ });
+
+ var filter2 = new AutoFunctionInvocationFilter(async (context, next) =>
+ {
+ executionOrder.Add("Filter2-Invoking");
+ await next(context);
+ });
+
+ var builder = Kernel.CreateBuilder();
+
+ builder.Plugins.Add(plugin);
+
+ builder.Services.AddOpenAIChatClient("model-id", "test-api-key", "organization-id", httpClient: this._httpClient);
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ // Act
+
+ // Case #1 - Add filter to services
+ builder.Services.AddSingleton(filter1);
+
+ var kernel = builder.Build();
+
+ // Case #2 - Add filter to kernel
+ kernel.AutoFunctionInvocationFilters.Add(filter2);
+
+ var result = await kernel.InvokePromptAsync("Test prompt", new(new PromptExecutionSettings
+ {
+ FunctionChoiceBehavior = FunctionChoiceBehavior.Auto()
+ }));
+
+ // Assert
+ Assert.Equal("Filter1-Invoking", executionOrder[0]);
+ Assert.Equal("Filter2-Invoking", executionOrder[1]);
+ }
+
+ [Theory]
+ [InlineData(true)]
+ [InlineData(false)]
+ public async Task MultipleFiltersAreExecutedInOrderAsync(bool isStreaming)
+ {
+ // Arrange
+ var executionOrder = new List();
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var filter1 = new AutoFunctionInvocationFilter(async (context, next) =>
+ {
+ executionOrder.Add("Filter1-Invoking");
+ await next(context);
+ executionOrder.Add("Filter1-Invoked");
+ });
+
+ var filter2 = new AutoFunctionInvocationFilter(async (context, next) =>
+ {
+ executionOrder.Add("Filter2-Invoking");
+ await next(context);
+ executionOrder.Add("Filter2-Invoked");
+ });
+
+ var filter3 = new AutoFunctionInvocationFilter(async (context, next) =>
+ {
+ executionOrder.Add("Filter3-Invoking");
+ await next(context);
+ executionOrder.Add("Filter3-Invoked");
+ });
+
+ var builder = Kernel.CreateBuilder();
+
+ builder.Plugins.Add(plugin);
+
+ builder.Services.AddOpenAIChatClient("model-id", "test-api-key", "organization-id", httpClient: this._httpClient);
+
+ builder.Services.AddSingleton(filter1);
+ builder.Services.AddSingleton(filter2);
+ builder.Services.AddSingleton(filter3);
+
+ var kernel = builder.Build();
+
+ var settings = new PromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() };
+
+ // Act
+ if (isStreaming)
+ {
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
+
+ await foreach (var item in kernel.InvokePromptStreamingAsync("Test prompt", new(settings)))
+ { }
+ }
+ else
+ {
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ await kernel.InvokePromptAsync("Test prompt", new(settings));
+ }
+
+ // Assert
+ Assert.Equal("Filter1-Invoking", executionOrder[0]);
+ Assert.Equal("Filter2-Invoking", executionOrder[1]);
+ Assert.Equal("Filter3-Invoking", executionOrder[2]);
+ Assert.Equal("Filter3-Invoked", executionOrder[3]);
+ Assert.Equal("Filter2-Invoked", executionOrder[4]);
+ Assert.Equal("Filter1-Invoked", executionOrder[5]);
+ }
+
+ [Fact]
+ public async Task FilterCanOverrideArgumentsAsync()
+ {
+ // Arrange
+ const string NewValue = "NewValue";
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ context.Arguments!["parameter"] = NewValue;
+ await next(context);
+ context.Terminate = true;
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Test prompt", new(new OpenAIPromptExecutionSettings
+ {
+ FunctionChoiceBehavior = FunctionChoiceBehavior.Auto()
+ }));
+
+ // Assert
+ var chatResponse = Assert.IsType(result.GetValue());
+ Assert.NotNull(chatResponse);
+
+ var lastFunctionResult = GetLastFunctionResultFromChatResponse(chatResponse);
+ Assert.NotNull(lastFunctionResult);
+ Assert.Equal("NewValue", lastFunctionResult.ToString());
+ }
+
+ [Fact]
+ public async Task FilterCanHandleExceptionAsync()
+ {
+ // Arrange
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { throw new KernelException("Exception from Function1"); }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => "Result from Function2", "Function2");
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ try
+ {
+ await next(context);
+ }
+ catch (KernelException exception)
+ {
+ Assert.Equal("Exception from Function1", exception.Message);
+ context.Result = new FunctionResult(context.Result, "Result from filter");
+ }
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ var chatClient = kernel.GetRequiredService();
+
+ var executionSettings = new PromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() };
+ var options = executionSettings.ToChatOptions(kernel);
+ List messageList = [new(ChatRole.System, "System message")];
+
+ // Act
+ var resultMessages = await chatClient.GetResponseAsync(messageList, options, CancellationToken.None);
+
+ // Assert
+ var firstToolMessage = resultMessages.Messages.First(m => m.Role == ChatRole.Tool);
+ Assert.NotNull(firstToolMessage);
+ var firstFunctionResult = firstToolMessage.Contents[^2] as Microsoft.Extensions.AI.FunctionResultContent;
+ var secondFunctionResult = firstToolMessage.Contents[^1] as Microsoft.Extensions.AI.FunctionResultContent;
+
+ Assert.NotNull(firstFunctionResult);
+ Assert.NotNull(secondFunctionResult);
+ Assert.Equal("Result from filter", firstFunctionResult.Result!.ToString());
+ Assert.Equal("Result from Function2", secondFunctionResult.Result!.ToString());
+ }
+
+ [Fact]
+ public async Task FilterCanHandleExceptionOnStreamingAsync()
+ {
+ // Arrange
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { throw new KernelException("Exception from Function1"); }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => "Result from Function2", "Function2");
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ try
+ {
+ await next(context);
+ }
+ catch (KernelException)
+ {
+ context.Result = new FunctionResult(context.Result, "Result from filter");
+ }
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
+
+ var chatClient = kernel.GetRequiredService();
+
+ var executionSettings = new PromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() };
+ var options = executionSettings.ToChatOptions(kernel);
+ List messageList = [];
+
+ // Act
+ List streamingContent = [];
+ await foreach (var update in chatClient.GetStreamingResponseAsync(messageList, options, CancellationToken.None))
+ {
+ streamingContent.Add(update);
+ }
+ var chatResponse = streamingContent.ToChatResponse();
+
+ // Assert
+ var firstToolMessage = chatResponse.Messages.First(m => m.Role == ChatRole.Tool);
+ Assert.NotNull(firstToolMessage);
+ var firstFunctionResult = firstToolMessage.Contents[^2] as Microsoft.Extensions.AI.FunctionResultContent;
+ var secondFunctionResult = firstToolMessage.Contents[^1] as Microsoft.Extensions.AI.FunctionResultContent;
+
+ Assert.NotNull(firstFunctionResult);
+ Assert.NotNull(secondFunctionResult);
+ Assert.Equal("Result from filter", firstFunctionResult.Result!.ToString());
+ Assert.Equal("Result from Function2", secondFunctionResult.Result!.ToString());
+ }
+
+ [Fact]
+ public async Task FiltersCanSkipFunctionExecutionAsync()
+ {
+ // Arrange
+ int filterInvocations = 0;
+ int firstFunctionInvocations = 0;
+ int secondFunctionInvocations = 0;
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ // Filter delegate is invoked only for second function, the first one should be skipped.
+ if (context.Function.Name == "MyPlugin_Function2")
+ {
+ await next(context);
+ }
+
+ filterInvocations++;
+ });
+
+ using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(File.ReadAllText("TestData/filters_chatclient_multiple_function_calls_test_response.json")) };
+ using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json")) };
+
+ this._messageHandlerStub.ResponsesToReturn = [response1, response2];
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Test prompt", new(new PromptExecutionSettings
+ {
+ FunctionChoiceBehavior = FunctionChoiceBehavior.Auto()
+ }));
+
+ // Assert
+ Assert.Equal(2, filterInvocations);
+ Assert.Equal(0, firstFunctionInvocations);
+ Assert.Equal(1, secondFunctionInvocations);
+ }
+
+ [Fact]
+ public async Task PreFilterCanTerminateOperationAsync()
+ {
+ // Arrange
+ int firstFunctionInvocations = 0;
+ int secondFunctionInvocations = 0;
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ // Terminating before first function, so all functions won't be invoked.
+ context.Terminate = true;
+
+ await next(context);
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ // Act
+ await kernel.InvokePromptAsync("Test prompt", new(new PromptExecutionSettings
+ {
+ FunctionChoiceBehavior = FunctionChoiceBehavior.Auto()
+ }));
+
+ // Assert
+ Assert.Equal(0, firstFunctionInvocations);
+ Assert.Equal(0, secondFunctionInvocations);
+ }
+
+ [Fact]
+ public async Task PreFilterCanTerminateOperationOnStreamingAsync()
+ {
+ // Arrange
+ int firstFunctionInvocations = 0;
+ int secondFunctionInvocations = 0;
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ // Terminating before first function, so all functions won't be invoked.
+ context.Terminate = true;
+
+ await next(context);
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
+
+ var executionSettings = new PromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() };
+
+ // Act
+ await foreach (var item in kernel.InvokePromptStreamingAsync("Test prompt", new(executionSettings)))
+ { }
+
+ // Assert
+ Assert.Equal(0, firstFunctionInvocations);
+ Assert.Equal(0, secondFunctionInvocations);
+ }
+
+ [Fact]
+ public async Task PostFilterCanTerminateOperationAsync()
+ {
+ // Arrange
+ int firstFunctionInvocations = 0;
+ int secondFunctionInvocations = 0;
+ List requestSequenceNumbers = [];
+ List functionSequenceNumbers = [];
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ requestSequenceNumbers.Add(context.RequestSequenceIndex);
+ functionSequenceNumbers.Add(context.FunctionSequenceIndex);
+
+ await next(context);
+
+ // Terminating after first function, so second function won't be invoked.
+ context.Terminate = true;
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ // Act
+ var functionResult = await kernel.InvokePromptAsync("Test prompt", new(new PromptExecutionSettings
+ {
+ FunctionChoiceBehavior = FunctionChoiceBehavior.Auto()
+ }));
+
+ // Assert
+ Assert.Equal(1, firstFunctionInvocations);
+ Assert.Equal(0, secondFunctionInvocations);
+ Assert.Equal([0], requestSequenceNumbers);
+ Assert.Equal([0], functionSequenceNumbers);
+
+ // Results of function invoked before termination should be returned
+ var chatResponse = functionResult.GetValue<ChatResponse>();
+ Assert.NotNull(chatResponse);
+
+ var result = GetLastFunctionResultFromChatResponse(chatResponse);
+ Assert.NotNull(result);
+ Assert.Equal("function1-value", result.ToString());
+ }
+
+ [Fact]
+ public async Task PostFilterCanTerminateOperationOnStreamingAsync()
+ {
+ // Arrange
+ int firstFunctionInvocations = 0;
+ int secondFunctionInvocations = 0;
+ List<int> requestSequenceNumbers = [];
+ List<int> functionSequenceNumbers = [];
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => { firstFunctionInvocations++; return parameter; }, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => { secondFunctionInvocations++; return parameter; }, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var kernel = this.GetKernelWithFilter(plugin, async (context, next) =>
+ {
+ requestSequenceNumbers.Add(context.RequestSequenceIndex);
+ functionSequenceNumbers.Add(context.FunctionSequenceIndex);
+
+ await next(context);
+
+ // Terminating after first function, so second function won't be invoked.
+ context.Terminate = true;
+ });
+
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
+
+ var executionSettings = new PromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() };
+
+ List streamingContent = [];
+
+ // Act
+ await foreach (var update in kernel.InvokePromptStreamingAsync("Test prompt", new(executionSettings)))
+ {
+ streamingContent.Add(update);
+ }
+
+ // Assert
+ Assert.Equal(1, firstFunctionInvocations);
+ Assert.Equal(0, secondFunctionInvocations);
+ Assert.Equal([0], requestSequenceNumbers);
+ Assert.Equal([0], functionSequenceNumbers);
+
+ // Results of function invoked before termination should be returned
+ Assert.Equal(4, streamingContent.Count);
+
+ var chatResponse = streamingContent.ToChatResponse();
+ Assert.NotNull(chatResponse);
+
+ var result = GetLastFunctionResultFromChatResponse(chatResponse);
+ Assert.NotNull(result);
+ Assert.Equal("function1-value", result.ToString());
+ }
+
+ [Theory]
+ [InlineData(true)]
+ [InlineData(false)]
+ public async Task FilterContextHasValidStreamingFlagAsync(bool isStreaming)
+ {
+ // Arrange
+ bool? actualStreamingFlag = null;
+
+ var function1 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function1");
+ var function2 = KernelFunctionFactory.CreateFromMethod((string parameter) => parameter, "Function2");
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]);
+
+ var filter = new AutoFunctionInvocationFilter(async (context, next) =>
+ {
+ actualStreamingFlag = context.IsStreaming;
+ await next(context);
+ });
+
+ var builder = Kernel.CreateBuilder();
+
+ builder.Plugins.Add(plugin);
+
+ builder.Services.AddOpenAIChatClient("model-id", "test-api-key", "organization-id", httpClient: this._httpClient);
+
+ builder.Services.AddSingleton(filter);
+
+ var kernel = builder.Build();
+
+ var settings = new PromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() };
+
+ // Act
+ if (isStreaming)
+ {
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
+
+ await kernel.InvokePromptStreamingAsync("Test prompt", new(settings)).ToListAsync();
+ }
+ else
+ {
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ await kernel.InvokePromptAsync("Test prompt", new(settings));
+ }
+
+ // Assert
+ Assert.Equal(isStreaming, actualStreamingFlag);
+ }
+
+ [Fact]
+ public async Task PromptExecutionSettingsArePropagatedFromInvokePromptToFilterContextAsync()
+ {
+ // Arrange
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingResponses();
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => { }, "Function1")]);
+
+ AutoFunctionInvocationContext? actualContext = null;
+
+ var kernel = this.GetKernelWithFilter(plugin, (context, next) =>
+ {
+ actualContext = context;
+ return Task.CompletedTask;
+ });
+
+ var expectedExecutionSettings = new PromptExecutionSettings
+ {
+ FunctionChoiceBehavior = FunctionChoiceBehavior.Auto()
+ };
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Test prompt", new(expectedExecutionSettings));
+
+ // Assert
+ Assert.NotNull(actualContext);
+ Assert.Same(expectedExecutionSettings, actualContext!.ExecutionSettings);
+ }
+
+ [Fact]
+ public async Task PromptExecutionSettingsArePropagatedFromInvokePromptStreamingToFilterContextAsync()
+ {
+ // Arrange
+ this._messageHandlerStub.ResponsesToReturn = GetFunctionCallingStreamingResponses();
+
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => { }, "Function1")]);
+
+ AutoFunctionInvocationContext? actualContext = null;
+
+ var kernel = this.GetKernelWithFilter(plugin, (context, next) =>
+ {
+ actualContext = context;
+ return Task.CompletedTask;
+ });
+
+ var expectedExecutionSettings = new PromptExecutionSettings
+ {
+ FunctionChoiceBehavior = FunctionChoiceBehavior.Auto()
+ };
+
+ // Act
+ await foreach (var item in kernel.InvokePromptStreamingAsync("Test prompt", new(expectedExecutionSettings)))
+ { }
+
+ // Assert
+ Assert.NotNull(actualContext);
+ Assert.Same(expectedExecutionSettings, actualContext!.ExecutionSettings);
+ }
+
+ public void Dispose()
+ {
+ this._httpClient.Dispose();
+ this._messageHandlerStub.Dispose();
+ }
+
+ #region private
+
+ private static object? GetLastFunctionResultFromChatResponse(ChatResponse chatResponse)
+ {
+ Assert.NotEmpty(chatResponse.Messages);
+ var chatMessage = chatResponse.Messages[^1];
+
+ Assert.NotEmpty(chatMessage.Contents);
+ Assert.Contains(chatMessage.Contents, c => c is Microsoft.Extensions.AI.FunctionResultContent);
+
+ var resultContent = (Microsoft.Extensions.AI.FunctionResultContent)chatMessage.Contents.Last(c => c is Microsoft.Extensions.AI.FunctionResultContent);
+ return resultContent.Result;
+ }
+
+#pragma warning disable CA2000 // Dispose objects before losing scope
+ private static List<HttpResponseMessage> GetFunctionCallingResponses()
+ {
+ return [
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_chatclient_multiple_function_calls_test_response.json")) },
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_chatclient_multiple_function_calls_test_response.json")) },
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_test_response.json")) }
+ ];
+ }
+
+ private static List<HttpResponseMessage> GetFunctionCallingStreamingResponses()
+ {
+ return [
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_chatclient_streaming_multiple_function_calls_test_response.txt")) },
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/filters_chatclient_streaming_multiple_function_calls_test_response.txt")) },
+ new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_test_response.txt")) }
+ ];
+ }
+#pragma warning restore CA2000
+
+ private Kernel GetKernelWithFilter(
+ KernelPlugin plugin,
+ Func<AutoFunctionInvocationContext, Func<AutoFunctionInvocationContext, Task>, Task>? onAutoFunctionInvocation)
+ {
+ var builder = Kernel.CreateBuilder();
+ var filter = new AutoFunctionInvocationFilter(onAutoFunctionInvocation);
+
+ builder.Plugins.Add(plugin);
+ builder.Services.AddSingleton(filter);
+
+ builder.AddOpenAIChatClient("model-id", "test-api-key", "organization-id", httpClient: this._httpClient);
+
+ return builder.Build();
+ }
+
+ private sealed class AutoFunctionInvocationFilter(
+ Func<AutoFunctionInvocationContext, Func<AutoFunctionInvocationContext, Task>, Task>? onAutoFunctionInvocation) : IAutoFunctionInvocationFilter
+ {
+ private readonly Func<AutoFunctionInvocationContext, Func<AutoFunctionInvocationContext, Task>, Task>? _onAutoFunctionInvocation = onAutoFunctionInvocation;
+
+ public Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func<AutoFunctionInvocationContext, Task> next) =>
+ this._onAutoFunctionInvocation?.Invoke(context, next) ?? Task.CompletedTask;
+ }
+
+ #endregion
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs
index 19992be01667..b308206b12d5 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Core/AutoFunctionInvocationFilterTests.cs
@@ -312,7 +312,7 @@ public async Task FilterCanOverrideArgumentsAsync()
// Act
var result = await kernel.InvokePromptAsync("Test prompt", new(new OpenAIPromptExecutionSettings
{
- ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions
+ FunctionChoiceBehavior = FunctionChoiceBehavior.Auto()
}));
// Assert
@@ -596,7 +596,7 @@ public async Task PostFilterCanTerminateOperationOnStreamingAsync()
Assert.Equal([0], requestSequenceNumbers);
Assert.Equal([0], functionSequenceNumbers);
- // Results of function invoked before termination should be returned
+ // Results of function invoked before termination should be returned
Assert.Equal(3, streamingContent.Count);
var lastMessageContent = streamingContent[^1] as StreamingChatMessageContent;
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/OpenAIKernelBuilderExtensionsChatClientTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/OpenAIKernelBuilderExtensionsChatClientTests.cs
new file mode 100644
index 000000000000..437d347aa194
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/OpenAIKernelBuilderExtensionsChatClientTests.cs
@@ -0,0 +1,92 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Net.Http;
+using Microsoft.Extensions.AI;
+using Microsoft.SemanticKernel;
+using OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Connectors.OpenAI.UnitTests.Extensions;
+
+public class OpenAIKernelBuilderExtensionsChatClientTests
+{
+ [Fact]
+ public void AddOpenAIChatClientNullArgsThrow()
+ {
+ // Arrange
+ IKernelBuilder builder = null!;
+ string modelId = "gpt-3.5-turbo";
+ string apiKey = "test_api_key";
+ string orgId = "test_org_id";
+ string serviceId = "test_service_id";
+
+ // Act & Assert
+ var exception = Assert.Throws<ArgumentNullException>(() => builder.AddOpenAIChatClient(modelId, apiKey, orgId, serviceId));
+ Assert.Equal("builder", exception.ParamName);
+
+ exception = Assert.Throws<ArgumentNullException>(() => builder.AddOpenAIChatClient(modelId, new OpenAIClient(apiKey), serviceId));
+ Assert.Equal("builder", exception.ParamName);
+
+ using var httpClient = new HttpClient();
+ exception = Assert.Throws<ArgumentNullException>(() => builder.AddOpenAIChatClient(modelId, new Uri("http://localhost"), apiKey, orgId, serviceId, httpClient));
+ Assert.Equal("builder", exception.ParamName);
+ }
+
+ [Fact]
+ public void AddOpenAIChatClientDefaultValidParametersRegistersService()
+ {
+ // Arrange
+ var builder = Kernel.CreateBuilder();
+ string modelId = "gpt-3.5-turbo";
+ string apiKey = "test_api_key";
+ string orgId = "test_org_id";
+ string serviceId = "test_service_id";
+
+ // Act
+ builder.AddOpenAIChatClient(modelId, apiKey, orgId, serviceId);
+
+ // Assert
+ var kernel = builder.Build();
+ Assert.NotNull(kernel.GetRequiredService<IChatClient>());
+ Assert.NotNull(kernel.GetRequiredService<IChatClient>(serviceId));
+ }
+
+ [Fact]
+ public void AddOpenAIChatClientOpenAIClientValidParametersRegistersService()
+ {
+ // Arrange
+ var builder = Kernel.CreateBuilder();
+ string modelId = "gpt-3.5-turbo";
+ var openAIClient = new OpenAIClient("test_api_key");
+ string serviceId = "test_service_id";
+
+ // Act
+ builder.AddOpenAIChatClient(modelId, openAIClient, serviceId);
+
+ // Assert
+ var kernel = builder.Build();
+ Assert.NotNull(kernel.GetRequiredService<IChatClient>());
+ Assert.NotNull(kernel.GetRequiredService<IChatClient>(serviceId));
+ }
+
+ [Fact]
+ public void AddOpenAIChatClientCustomEndpointValidParametersRegistersService()
+ {
+ // Arrange
+ var builder = Kernel.CreateBuilder();
+ string modelId = "gpt-3.5-turbo";
+ string apiKey = "test_api_key";
+ string orgId = "test_org_id";
+ string serviceId = "test_service_id";
+ using var httpClient = new HttpClient();
+
+ // Act
+ builder.AddOpenAIChatClient(modelId, new Uri("http://localhost"), apiKey, orgId, serviceId, httpClient);
+
+ // Assert
+ var kernel = builder.Build();
+ Assert.NotNull(kernel.GetRequiredService<IChatClient>());
+ Assert.NotNull(kernel.GetRequiredService<IChatClient>(serviceId));
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/OpenAIServiceCollectionExtensionsChatClientTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/OpenAIServiceCollectionExtensionsChatClientTests.cs
new file mode 100644
index 000000000000..7a3888b95f30
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Extensions/OpenAIServiceCollectionExtensionsChatClientTests.cs
@@ -0,0 +1,114 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Net.Http;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+using OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Connectors.OpenAI.UnitTests.Extensions;
+
+public class OpenAIServiceCollectionExtensionsChatClientTests
+{
+ [Fact]
+ public void AddOpenAIChatClientNullArgsThrow()
+ {
+ // Arrange
+ ServiceCollection services = null!;
+ string modelId = "gpt-3.5-turbo";
+ string apiKey = "test_api_key";
+ string orgId = "test_org_id";
+ string serviceId = "test_service_id";
+
+ // Act & Assert
+ var exception = Assert.Throws<ArgumentNullException>(() => services.AddOpenAIChatClient(modelId, apiKey, orgId, serviceId));
+ Assert.Equal("services", exception.ParamName);
+
+ exception = Assert.Throws<ArgumentNullException>(() => services.AddOpenAIChatClient(modelId, new OpenAIClient(apiKey), serviceId));
+ Assert.Equal("services", exception.ParamName);
+
+ using var httpClient = new HttpClient();
+ exception = Assert.Throws<ArgumentNullException>(() => services.AddOpenAIChatClient(modelId, new Uri("http://localhost"), apiKey, orgId, serviceId, httpClient));
+ Assert.Equal("services", exception.ParamName);
+ }
+
+ [Fact]
+ public void AddOpenAIChatClientDefaultValidParametersRegistersService()
+ {
+ // Arrange
+ var services = new ServiceCollection();
+ string modelId = "gpt-3.5-turbo";
+ string apiKey = "test_api_key";
+ string orgId = "test_org_id";
+ string serviceId = "test_service_id";
+
+ // Act
+ services.AddOpenAIChatClient(modelId, apiKey, orgId, serviceId);
+
+ // Assert
+ var serviceProvider = services.BuildServiceProvider();
+ var chatClient = serviceProvider.GetKeyedService<IChatClient>(serviceId);
+ Assert.NotNull(chatClient);
+ }
+
+ [Fact]
+ public void AddOpenAIChatClientOpenAIClientValidParametersRegistersService()
+ {
+ // Arrange
+ var services = new ServiceCollection();
+ string modelId = "gpt-3.5-turbo";
+ var openAIClient = new OpenAIClient("test_api_key");
+ string serviceId = "test_service_id";
+
+ // Act
+ services.AddOpenAIChatClient(modelId, openAIClient, serviceId);
+
+ // Assert
+ var serviceProvider = services.BuildServiceProvider();
+ var chatClient = serviceProvider.GetKeyedService<IChatClient>(serviceId);
+ Assert.NotNull(chatClient);
+ }
+
+ [Fact]
+ public void AddOpenAIChatClientCustomEndpointValidParametersRegistersService()
+ {
+ // Arrange
+ var services = new ServiceCollection();
+ string modelId = "gpt-3.5-turbo";
+ string apiKey = "test_api_key";
+ string orgId = "test_org_id";
+ string serviceId = "test_service_id";
+ using var httpClient = new HttpClient();
+ // Act
+ services.AddOpenAIChatClient(modelId, new Uri("http://localhost"), apiKey, orgId, serviceId, httpClient);
+ // Assert
+ var serviceProvider = services.BuildServiceProvider();
+ var chatClient = serviceProvider.GetKeyedService<IChatClient>(serviceId);
+ Assert.NotNull(chatClient);
+ }
+
+ [Fact]
+ public void AddOpenAIChatClientWorksWithKernel()
+ {
+ var services = new ServiceCollection();
+ string modelId = "gpt-3.5-turbo";
+ string apiKey = "test_api_key";
+ string orgId = "test_org_id";
+ string serviceId = "test_service_id";
+
+ // Act
+ services.AddOpenAIChatClient(modelId, apiKey, orgId, serviceId);
+ services.AddKernel();
+
+ var serviceProvider = services.BuildServiceProvider();
+ var kernel = serviceProvider.GetRequiredService<Kernel>();
+
+ var serviceFromCollection = serviceProvider.GetKeyedService<IChatClient>(serviceId);
+ var serviceFromKernel = kernel.GetRequiredService<IChatClient>(serviceId);
+
+ Assert.NotNull(serviceFromKernel);
+ Assert.Same(serviceFromCollection, serviceFromKernel);
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_chatclient_multiple_function_calls_test_response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_chatclient_multiple_function_calls_test_response.txt
new file mode 100644
index 000000000000..17ce94647fd5
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/chat_completion_streaming_chatclient_multiple_function_calls_test_response.txt
@@ -0,0 +1,9 @@
+data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"1","type":"function","function":{"name":"MyPlugin_GetCurrentWeather","arguments":"{\n\"location\": \"Boston, MA\"\n}"}}]},"finish_reason":"tool_calls"}]}
+
+data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":1,"id":"2","type":"function","function":{"name":"MyPlugin_FunctionWithException","arguments":"{\n\"argument\": \"value\"\n}"}}]},"finish_reason":"tool_calls"}]}
+
+data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":2,"id":"3","type":"function","function":{"name":"MyPlugin_NonExistentFunction","arguments":"{\n\"argument\": \"value\"\n}"}}]},"finish_reason":"tool_calls"}]}
+
+data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":3,"id":"4","type":"function","function":{"name":"MyPlugin_InvalidArguments","arguments":"invalid_arguments_format"}}]},"finish_reason":"tool_calls"}]}
+
+data: [DONE]
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_chatclient_multiple_function_calls_test_response.json b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_chatclient_multiple_function_calls_test_response.json
new file mode 100644
index 000000000000..2c499b14089f
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_chatclient_multiple_function_calls_test_response.json
@@ -0,0 +1,40 @@
+{
+ "id": "response-id",
+ "object": "chat.completion",
+ "created": 1699896916,
+ "model": "gpt-3.5-turbo-0613",
+ "choices": [
+ {
+ "index": 0,
+ "message": {
+ "role": "assistant",
+ "content": null,
+ "tool_calls": [
+ {
+ "id": "1",
+ "type": "function",
+ "function": {
+ "name": "MyPlugin_Function1",
+ "arguments": "{\n\"parameter\": \"function1-value\"\n}"
+ }
+ },
+ {
+ "id": "2",
+ "type": "function",
+ "function": {
+ "name": "MyPlugin_Function2",
+ "arguments": "{\n\"parameter\": \"function2-value\"\n}"
+ }
+ }
+ ]
+ },
+ "logprobs": null,
+ "finish_reason": "tool_calls"
+ }
+ ],
+ "usage": {
+ "prompt_tokens": 82,
+ "completion_tokens": 17,
+ "total_tokens": 99
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_chatclient_streaming_multiple_function_calls_test_response.txt b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_chatclient_streaming_multiple_function_calls_test_response.txt
new file mode 100644
index 000000000000..c113e3fa97ca
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/TestData/filters_chatclient_streaming_multiple_function_calls_test_response.txt
@@ -0,0 +1,5 @@
+data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":0,"id":"1","type":"function","function":{"name":"MyPlugin_Function1","arguments":"{\n\"parameter\": \"function1-value\"\n}"}}]},"finish_reason":"tool_calls"}]}
+
+data: {"id":"response-id","object":"chat.completion.chunk","created":1704212243,"model":"gpt-4","system_fingerprint":null,"choices":[{"index":0,"delta":{"role":"assistant","content":"Test chat streaming response","tool_calls":[{"index":1,"id":"2","type":"function","function":{"name":"MyPlugin_Function2","arguments":"{\n\"parameter\": \"function2-value\"\n}"}}]},"finish_reason":"tool_calls"}]}
+
+data: [DONE]
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj b/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj
index 64a0e72bde6d..c17e878a7a42 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj
@@ -37,6 +37,7 @@
+
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelBuilderExtensions.ChatClient.cs b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelBuilderExtensions.ChatClient.cs
new file mode 100644
index 000000000000..9d1832b340ff
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelBuilderExtensions.ChatClient.cs
@@ -0,0 +1,105 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Diagnostics.CodeAnalysis;
+using System.Net.Http;
+using Microsoft.Extensions.AI;
+using OpenAI;
+
+namespace Microsoft.SemanticKernel;
+
+/// Extension methods for <see cref="IKernelBuilder"/>.
+[Experimental("SKEXP0010")]
+public static class OpenAIChatClientKernelBuilderExtensions
+{
+ #region Chat Completion
+
+ ///
+ /// Adds an OpenAI to the .
+ ///
+ /// The instance to augment.
+ /// OpenAI model name, see https://platform.openai.com/docs/models
+ /// OpenAI API key, see https://platform.openai.com/account/api-keys
+ /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations.
+ /// A local identifier for the given AI service
+ /// The HttpClient to use with this service.
+ /// The same instance as .
+ public static IKernelBuilder AddOpenAIChatClient(
+ this IKernelBuilder builder,
+ string modelId,
+ string apiKey,
+ string? orgId = null,
+ string? serviceId = null,
+ HttpClient? httpClient = null)
+ {
+ Verify.NotNull(builder);
+
+ builder.Services.AddOpenAIChatClient(
+ modelId,
+ apiKey,
+ orgId,
+ serviceId,
+ httpClient);
+
+ return builder;
+ }
+
+ ///
+ /// Adds an OpenAI to the .
+ ///
+ /// The instance to augment.
+ /// OpenAI model id
+ /// to use for the service. If null, one must be available in the service provider when this service is resolved.
+ /// A local identifier for the given AI service
+ /// The same instance as .
+ public static IKernelBuilder AddOpenAIChatClient(
+ this IKernelBuilder builder,
+ string modelId,
+ OpenAIClient? openAIClient = null,
+ string? serviceId = null)
+ {
+ Verify.NotNull(builder);
+
+ builder.Services.AddOpenAIChatClient(
+ modelId,
+ openAIClient,
+ serviceId);
+
+ return builder;
+ }
+
+ ///
+ /// Adds a custom endpoint OpenAI to the .
+ ///
+ /// The instance to augment.
+ /// OpenAI model name, see https://platform.openai.com/docs/models
+ /// Custom OpenAI Compatible Message API endpoint
+ /// OpenAI API key, see https://platform.openai.com/account/api-keys
+ /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations.
+ /// A local identifier for the given AI service
+ /// The HttpClient to use with this service.
+ /// The same instance as .
+ public static IKernelBuilder AddOpenAIChatClient(
+ this IKernelBuilder builder,
+ string modelId,
+ Uri endpoint,
+ string? apiKey,
+ string? orgId = null,
+ string? serviceId = null,
+ HttpClient? httpClient = null)
+ {
+ Verify.NotNull(builder);
+
+ builder.Services.AddOpenAIChatClient(
+ modelId,
+ endpoint,
+ apiKey,
+ orgId,
+ serviceId,
+ httpClient);
+
+ return builder;
+ }
+
+ #endregion
+}
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelBuilderExtensions.cs
index a07a81fdb5b3..01307b9adc2a 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelBuilderExtensions.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIKernelBuilderExtensions.cs
@@ -20,7 +20,7 @@
namespace Microsoft.SemanticKernel;
///
-/// Sponsor extensions class for .
+/// Extension methods for .
///
public static class OpenAIKernelBuilderExtensions
{
@@ -269,7 +269,7 @@ public static IKernelBuilder AddOpenAIFiles(
#region Chat Completion
///
- /// Adds the OpenAI chat completion service to the list.
+ /// Adds an to the .
///
/// The instance to augment.
/// OpenAI model name, see https://platform.openai.com/docs/models
@@ -304,7 +304,7 @@ OpenAIChatCompletionService Factory(IServiceProvider serviceProvider, object? _)
}
///
- /// Adds the OpenAI chat completion service to the list.
+ /// Adds an to the .
///
/// The instance to augment.
/// OpenAI model id
@@ -330,7 +330,7 @@ OpenAIChatCompletionService Factory(IServiceProvider serviceProvider, object? _)
}
///
- /// Adds the Custom Endpoint OpenAI chat completion service to the list.
+ /// Adds a custom endpoint to the .
///
/// The instance to augment.
/// OpenAI model name, see https://platform.openai.com/docs/models
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIServiceCollectionExtensions.ChatClient.cs b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIServiceCollectionExtensions.ChatClient.cs
new file mode 100644
index 000000000000..2954e958936a
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Extensions/OpenAIServiceCollectionExtensions.ChatClient.cs
@@ -0,0 +1,154 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.ClientModel;
+using System.ClientModel.Primitives;
+using System.Diagnostics.CodeAnalysis;
+using System.Net.Http;
+using Microsoft.Extensions.AI;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Http;
+using OpenAI;
+
+namespace Microsoft.SemanticKernel;
+
+///
+/// Extension methods for <see cref="IServiceCollection"/>.
+///
+[Experimental("SKEXP0010")]
+public static class OpenAIChatClientServiceCollectionExtensions
+{
+ ///
+ /// White space constant.
+ ///
+ private const string SingleSpace = " ";
+
+ ///
+ /// Adds the OpenAI chat completion service to the list.
+ ///
+ /// The instance to augment.
+ /// OpenAI model name, see https://platform.openai.com/docs/models
+ /// OpenAI API key, see https://platform.openai.com/account/api-keys
+ /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations.
+ /// A local identifier for the given AI service
+ /// The HttpClient to use with this service.
+ /// The same instance as .
+ public static IServiceCollection AddOpenAIChatClient(
+ this IServiceCollection services,
+ string modelId,
+ string apiKey,
+ string? orgId = null,
+ string? serviceId = null,
+ HttpClient? httpClient = null)
+ {
+ Verify.NotNull(services);
+
+ IChatClient Factory(IServiceProvider serviceProvider, object? _)
+ {
+ var loggerFactory = serviceProvider.GetService<ILoggerFactory>();
+
+ return new OpenAIClient(new ApiKeyCredential(apiKey ?? SingleSpace), options: GetClientOptions(orgId: orgId, httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider)))
+ .GetChatClient(modelId)
+ .AsIChatClient()
+ .AsKernelFunctionInvokingChatClient(loggerFactory);
+ }
+
+ services.AddKeyedSingleton<IChatClient>(serviceId, (Func<IServiceProvider, object?, IChatClient>)Factory);
+
+ return services;
+ }
+
+ ///
+ /// Adds the OpenAI chat completion service to the list.
+ ///
+ /// The instance to augment.
+ /// OpenAI model id
+ /// to use for the service. If null, one must be available in the service provider when this service is resolved.
+ /// A local identifier for the given AI service
+ /// The same instance as .
+ public static IServiceCollection AddOpenAIChatClient(this IServiceCollection services,
+ string modelId,
+ OpenAIClient? openAIClient = null,
+ string? serviceId = null)
+ {
+ Verify.NotNull(services);
+
+ IChatClient Factory(IServiceProvider serviceProvider, object? _)
+ {
+ var loggerFactory = serviceProvider.GetService<ILoggerFactory>();
+
+ return (openAIClient ?? serviceProvider.GetRequiredService<OpenAIClient>())
+ .GetChatClient(modelId)
+ .AsIChatClient()
+ .AsKernelFunctionInvokingChatClient(loggerFactory);
+ }
+
+ services.AddKeyedSingleton<IChatClient>(serviceId, (Func<IServiceProvider, object?, IChatClient>)Factory);
+
+ return services;
+ }
+
+ ///
+ /// Adds the Custom OpenAI chat completion service to the list.
+ ///
+ /// The instance to augment.
+ /// OpenAI model name, see https://platform.openai.com/docs/models
+ /// A Custom Message API compatible endpoint.
+ /// OpenAI API key, see https://platform.openai.com/account/api-keys
+ /// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations.
+ /// A local identifier for the given AI service
+ /// The HttpClient to use with this service.
+ /// The same instance as .
+ public static IServiceCollection AddOpenAIChatClient(
+ this IServiceCollection services,
+ string modelId,
+ Uri endpoint,
+ string? apiKey = null,
+ string? orgId = null,
+ string? serviceId = null,
+ HttpClient? httpClient = null)
+ {
+ Verify.NotNull(services);
+
+ IChatClient Factory(IServiceProvider serviceProvider, object? _)
+ {
+ var loggerFactory = serviceProvider.GetService<ILoggerFactory>();
+
+ return new OpenAIClient(new ApiKeyCredential(apiKey ?? SingleSpace), GetClientOptions(endpoint, orgId, HttpClientProvider.GetHttpClient(httpClient, serviceProvider)))
+ .GetChatClient(modelId)
+ .AsIChatClient()
+ .AsKernelFunctionInvokingChatClient(loggerFactory);
+ }
+
+ services.AddKeyedSingleton<IChatClient>(serviceId, (Func<IServiceProvider, object?, IChatClient>)Factory);
+
+ return services;
+ }
+
+ private static OpenAIClientOptions GetClientOptions(
+ Uri? endpoint = null,
+ string? orgId = null,
+ HttpClient? httpClient = null)
+ {
+ OpenAIClientOptions options = new();
+
+ if (endpoint is not null)
+ {
+ options.Endpoint = endpoint;
+ }
+
+ if (orgId is not null)
+ {
+ options.OrganizationId = orgId;
+ }
+
+ if (httpClient is not null)
+ {
+ options.Transport = new HttpClientPipelineTransport(httpClient);
+ }
+
+ return options;
+ }
+}
diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIAudioToTextTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIAudioToTextTests.cs
index 90375307c533..9e1127fa8b55 100644
--- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIAudioToTextTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIAudioToTextTests.cs
@@ -22,7 +22,7 @@ public sealed class OpenAIAudioToTextTests()
.AddUserSecrets()
.Build();
- [RetryFact]//(Skip = "OpenAI will often throttle requests. This test is for manual verification.")]
+ [RetryFact] //(Skip = "OpenAI will often throttle requests. This test is for manual verification.")]
public async Task OpenAIAudioToTextTestAsync()
{
// Arrange
diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletionTests.cs
index fe8ff155d9c5..ddfe6b997a25 100644
--- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletionTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIChatCompletionTests.cs
@@ -12,7 +12,6 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Http.Resilience;
-using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using OpenAI;
@@ -48,16 +47,16 @@ public async Task ItCanUseOpenAiChatForTextGenerationAsync()
[Fact]
public async Task ItCanUseOpenAiChatClientAndContentsAsync()
{
- var OpenAIConfiguration = this._configuration.GetSection("OpenAI").Get();
- Assert.NotNull(OpenAIConfiguration);
- Assert.NotNull(OpenAIConfiguration.ChatModelId);
- Assert.NotNull(OpenAIConfiguration.ApiKey);
- Assert.NotNull(OpenAIConfiguration.ServiceId);
+ var openAIConfiguration = this._configuration.GetSection("OpenAI").Get();
+ Assert.NotNull(openAIConfiguration);
+ Assert.NotNull(openAIConfiguration.ChatModelId);
+ Assert.NotNull(openAIConfiguration.ApiKey);
+ Assert.NotNull(openAIConfiguration.ServiceId);
// Arrange
- var openAIClient = new OpenAIClient(OpenAIConfiguration.ApiKey);
+ var openAIClient = new OpenAIClient(openAIConfiguration.ApiKey);
var builder = Kernel.CreateBuilder();
- builder.Services.AddChatClient(openAIClient.AsChatClient(OpenAIConfiguration.ChatModelId));
+ builder.Services.AddChatClient(openAIClient.GetChatClient(openAIConfiguration.ChatModelId).AsIChatClient());
var kernel = builder.Build();
var func = kernel.CreateFunctionFromPrompt(
@@ -104,16 +103,16 @@ public async Task OpenAIStreamingTestAsync()
[Fact]
public async Task ItCanUseOpenAiStreamingChatClientAndContentsAsync()
{
- var OpenAIConfiguration = this._configuration.GetSection("OpenAI").Get();
- Assert.NotNull(OpenAIConfiguration);
- Assert.NotNull(OpenAIConfiguration.ChatModelId);
- Assert.NotNull(OpenAIConfiguration.ApiKey);
- Assert.NotNull(OpenAIConfiguration.ServiceId);
+ var openAIConfiguration = this._configuration.GetSection("OpenAI").Get();
+ Assert.NotNull(openAIConfiguration);
+ Assert.NotNull(openAIConfiguration.ChatModelId);
+ Assert.NotNull(openAIConfiguration.ApiKey);
+ Assert.NotNull(openAIConfiguration.ServiceId);
// Arrange
- var openAIClient = new OpenAIClient(OpenAIConfiguration.ApiKey);
+ var openAIClient = new OpenAIClient(openAIConfiguration.ApiKey);
var builder = Kernel.CreateBuilder();
- builder.Services.AddChatClient(openAIClient.AsChatClient(OpenAIConfiguration.ChatModelId));
+ builder.Services.AddChatClient(openAIClient.GetChatClient(openAIConfiguration.ChatModelId).AsIChatClient());
var kernel = builder.Build();
var plugins = TestHelpers.ImportSamplePlugins(kernel, "ChatPlugin");
@@ -179,7 +178,7 @@ public async Task OpenAIHttpRetryPolicyTestAsync()
// Assert
Assert.All(statusCodes, s => Assert.Equal(HttpStatusCode.Unauthorized, s));
- Assert.Equal(HttpStatusCode.Unauthorized, ((HttpOperationException)exception).StatusCode);
+ Assert.Equal(HttpStatusCode.Unauthorized, exception.StatusCode);
}
[Fact]
@@ -258,11 +257,11 @@ public async Task SemanticKernelVersionHeaderIsSentAsync()
var kernel = this.CreateAndInitializeKernel(httpClient);
// Act
- var result = await kernel.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?");
+ await kernel.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?");
// Assert
Assert.NotNull(httpHeaderHandler.RequestHeaders);
- Assert.True(httpHeaderHandler.RequestHeaders.TryGetValues("Semantic-Kernel-Version", out var values));
+ Assert.True(httpHeaderHandler.RequestHeaders.TryGetValues("Semantic-Kernel-Version", out var _));
}
//[Theory(Skip = "This test is for manual verification.")]
@@ -301,18 +300,18 @@ public async Task LogProbsDataIsReturnedWhenRequestedAsync(bool? logprobs, int?
private Kernel CreateAndInitializeKernel(HttpClient? httpClient = null)
{
- var OpenAIConfiguration = this._configuration.GetSection("OpenAI").Get();
- Assert.NotNull(OpenAIConfiguration);
- Assert.NotNull(OpenAIConfiguration.ChatModelId);
- Assert.NotNull(OpenAIConfiguration.ApiKey);
- Assert.NotNull(OpenAIConfiguration.ServiceId);
+ var openAIConfiguration = this._configuration.GetSection("OpenAI").Get();
+ Assert.NotNull(openAIConfiguration);
+ Assert.NotNull(openAIConfiguration.ChatModelId);
+ Assert.NotNull(openAIConfiguration.ApiKey);
+ Assert.NotNull(openAIConfiguration.ServiceId);
- var kernelBuilder = base.CreateKernelBuilder();
+ var kernelBuilder = this.CreateKernelBuilder();
kernelBuilder.AddOpenAIChatCompletion(
- modelId: OpenAIConfiguration.ChatModelId,
- apiKey: OpenAIConfiguration.ApiKey,
- serviceId: OpenAIConfiguration.ServiceId,
+ modelId: openAIConfiguration.ChatModelId,
+ apiKey: openAIConfiguration.ApiKey,
+ serviceId: openAIConfiguration.ServiceId,
httpClient: httpClient);
return kernelBuilder.Build();
diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToAudioTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToAudioTests.cs
index c2818abe2502..420295fe4349 100644
--- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToAudioTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToAudioTests.cs
@@ -18,7 +18,7 @@ public sealed class OpenAITextToAudioTests
.AddUserSecrets()
.Build();
- [Fact]//(Skip = "OpenAI will often throttle requests. This test is for manual verification.")]
+ [Fact] //(Skip = "OpenAI will often throttle requests. This test is for manual verification.")]
public async Task OpenAITextToAudioTestAsync()
{
// Arrange
diff --git a/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallsProcessor.cs b/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallsProcessor.cs
index 97cb426c307d..88f3da9d6a53 100644
--- a/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallsProcessor.cs
+++ b/dotnet/src/InternalUtilities/connectors/AI/FunctionCalling/FunctionCallsProcessor.cs
@@ -211,7 +211,7 @@ public FunctionCallsProcessor(ILogger? logger = null)
{
bool terminationRequested = false;
- // Wait for all of the function invocations to complete, then add the results to the chat, but stop when we hit a
+ // Wait for all the function invocations to complete, then add the results to the chat, but stop when we hit a
// function for which termination was requested.
FunctionResultContext[] resultContexts = await Task.WhenAll(functionTasks).ConfigureAwait(false);
foreach (FunctionResultContext resultContext in resultContexts)
@@ -487,8 +487,8 @@ public static string ProcessFunctionResult(object functionResult)
return stringResult;
}
- // This is an optimization to use ChatMessageContent content directly
- // without unnecessary serialization of the whole message content class.
+ // This is an optimization to use ChatMessageContent content directly
+ // without unnecessary serialization of the whole message content class.
if (functionResult is ChatMessageContent chatMessageContent)
{
return chatMessageContent.ToString();
diff --git a/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs b/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs
index 2fefb6ee9d16..3a8d561f4eaf 100644
--- a/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs
+++ b/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs
@@ -99,25 +99,25 @@ protected IChatClient AddChatClientToKernel(IKernelBuilder builder)
IChatClient chatClient;
if (this.UseOpenAIConfig)
{
- chatClient = new Microsoft.Extensions.AI.OpenAIChatClient(
- new OpenAI.OpenAIClient(TestConfiguration.OpenAI.ApiKey),
- TestConfiguration.OpenAI.ChatModelId);
+ chatClient = new OpenAI.OpenAIClient(TestConfiguration.OpenAI.ApiKey)
+ .GetChatClient(TestConfiguration.OpenAI.ChatModelId)
+ .AsIChatClient();
}
else if (!string.IsNullOrEmpty(this.ApiKey))
{
- chatClient = new Microsoft.Extensions.AI.OpenAIChatClient(
- openAIClient: new AzureOpenAIClient(
+ chatClient = new AzureOpenAIClient(
endpoint: new Uri(TestConfiguration.AzureOpenAI.Endpoint),
- credential: new ApiKeyCredential(TestConfiguration.AzureOpenAI.ApiKey)),
- modelId: TestConfiguration.AzureOpenAI.ChatDeploymentName);
+ credential: new ApiKeyCredential(TestConfiguration.AzureOpenAI.ApiKey))
+ .GetChatClient(TestConfiguration.AzureOpenAI.ChatDeploymentName)
+ .AsIChatClient();
}
else
{
- chatClient = new Microsoft.Extensions.AI.OpenAIChatClient(
- openAIClient: new AzureOpenAIClient(
+ chatClient = new AzureOpenAIClient(
endpoint: new Uri(TestConfiguration.AzureOpenAI.Endpoint),
- credential: new AzureCliCredential()),
- modelId: TestConfiguration.AzureOpenAI.ChatDeploymentName);
+ credential: new AzureCliCredential())
+ .GetChatClient(TestConfiguration.AzureOpenAI.ChatDeploymentName)
+ .AsIChatClient();
}
var functionCallingChatClient = chatClient!.AsKernelFunctionInvokingChatClient();
diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatClient/AIFunctionFactory.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatClient/AIFunctionFactory.cs
deleted file mode 100644
index a0d6b1865a8f..000000000000
--- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatClient/AIFunctionFactory.cs
+++ /dev/null
@@ -1,631 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System;
-using System.Buffers;
-using System.Collections.Concurrent;
-using System.Collections.Generic;
-using System.ComponentModel;
-using System.Diagnostics;
-using System.Diagnostics.CodeAnalysis;
-using System.IO;
-using System.Linq;
-using System.Reflection;
-using System.Runtime.CompilerServices;
-using System.Text.Json;
-using System.Text.Json.Nodes;
-using System.Text.Json.Serialization.Metadata;
-using System.Text.RegularExpressions;
-using System.Threading;
-using System.Threading.Tasks;
-using Microsoft.Extensions.AI;
-
-#pragma warning disable IDE0009 // Use explicit 'this.' qualifier
-#pragma warning disable IDE1006 // Missing static prefix s_ suffix
-
-namespace Microsoft.SemanticKernel.ChatCompletion;
-
-// Slight modified source from
-// https://raw.githubusercontent.com/dotnet/extensions/refs/heads/main/src/Libraries/Microsoft.Extensions.AI/Functions/AIFunctionFactory.cs
-
-/// Provides factory methods for creating commonly used implementations of .
-[ExcludeFromCodeCoverage]
-internal static partial class AIFunctionFactory
-{
- /// Holds the default options instance used when creating function.
- private static readonly AIFunctionFactoryOptions _defaultOptions = new();
-
- /// Creates an instance for a method, specified via a delegate.
- /// The method to be represented via the created .
- /// Metadata to use to override defaults inferred from .
- /// The created for invoking .
- ///
- ///
- /// Return values are serialized to using 's
- /// . Arguments that are not already of the expected type are
- /// marshaled to the expected type via JSON and using 's
- /// . If the argument is a ,
- /// , or , it is deserialized directly. If the argument is anything else unknown,
- /// it is round-tripped through JSON, serializing the object as JSON and then deserializing it to the expected type.
- ///
- ///
- public static AIFunction Create(Delegate method, AIFunctionFactoryOptions? options)
- {
- Verify.NotNull(method);
-
- return ReflectionAIFunction.Build(method.Method, method.Target, options ?? _defaultOptions);
- }
-
- /// Creates an instance for a method, specified via a delegate.
- /// The method to be represented via the created .
- /// The name to use for the .
- /// The description to use for the .
- /// The used to marshal function parameters and any return value.
- /// The created for invoking .
- ///
- ///
- /// Return values are serialized to using .
- /// Arguments that are not already of the expected type are marshaled to the expected type via JSON and using
- /// . If the argument is a , ,
- /// or , it is deserialized directly. If the argument is anything else unknown, it is
- /// round-tripped through JSON, serializing the object as JSON and then deserializing it to the expected type.
- ///
- ///
- public static AIFunction Create(Delegate method, string? name = null, string? description = null, JsonSerializerOptions? serializerOptions = null)
- {
- Verify.NotNull(method);
-
- AIFunctionFactoryOptions createOptions = serializerOptions is null && name is null && description is null
- ? _defaultOptions
- : new()
- {
- Name = name,
- Description = description,
- SerializerOptions = serializerOptions,
- };
-
- return ReflectionAIFunction.Build(method.Method, method.Target, createOptions);
- }
-
- ///
- /// Creates an instance for a method, specified via an instance
- /// and an optional target object if the method is an instance method.
- ///
- /// The method to be represented via the created .
- ///
- /// The target object for the if it represents an instance method.
- /// This should be if and only if is a static method.
- ///
- /// Metadata to use to override defaults inferred from .
- /// The created for invoking .
- ///
- ///
- /// Return values are serialized to using 's
- /// . Arguments that are not already of the expected type are
- /// marshaled to the expected type via JSON and using 's
- /// . If the argument is a ,
- /// , or , it is deserialized directly. If the argument is anything else unknown,
- /// it is round-tripped through JSON, serializing the object as JSON and then deserializing it to the expected type.
- ///
- ///
- public static AIFunction Create(MethodInfo method, object? target, AIFunctionFactoryOptions? options)
- {
- Verify.NotNull(method);
-
- return ReflectionAIFunction.Build(method, target, options ?? _defaultOptions);
- }
-
- ///
- /// Creates an instance for a method, specified via an instance
- /// and an optional target object if the method is an instance method.
- ///
- /// The method to be represented via the created .
- ///
- /// The target object for the if it represents an instance method.
- /// This should be if and only if is a static method.
- ///
- /// The name to use for the .
- /// The description to use for the .
- /// The used to marshal function parameters and return value.
- /// The created for invoking .
- ///
- ///
- /// Return values are serialized to using .
- /// Arguments that are not already of the expected type are marshaled to the expected type via JSON and using
- /// . If the argument is a , ,
- /// or , it is deserialized directly. If the argument is anything else unknown, it is
- /// round-tripped through JSON, serializing the object as JSON and then deserializing it to the expected type.
- ///
- ///
- public static AIFunction Create(MethodInfo method, object? target, string? name = null, string? description = null, JsonSerializerOptions? serializerOptions = null)
- {
- Verify.NotNull(method);
-
- AIFunctionFactoryOptions createOptions = serializerOptions is null && name is null && description is null
- ? _defaultOptions
- : new()
- {
- Name = name,
- Description = description,
- SerializerOptions = serializerOptions,
- };
-
- return ReflectionAIFunction.Build(method, target, createOptions);
- }
-
- private sealed class ReflectionAIFunction : AIFunction
- {
- public static ReflectionAIFunction Build(MethodInfo method, object? target, AIFunctionFactoryOptions options)
- {
- Verify.NotNull(method);
-
- if (method.ContainsGenericParameters)
- {
- throw new ArgumentException("Open generic methods are not supported", nameof(method));
- }
-
- if (!method.IsStatic && target is null)
- {
- throw new ArgumentNullException(nameof(target), "Target must not be null for an instance method.");
- }
-
- ReflectionAIFunctionDescriptor functionDescriptor = ReflectionAIFunctionDescriptor.GetOrCreate(method, options);
-
- if (target is null && options.AdditionalProperties is null)
- {
- // We can use a cached value for static methods not specifying additional properties.
- return functionDescriptor.CachedDefaultInstance ??= new(functionDescriptor, target, options);
- }
-
- return new(functionDescriptor, target, options);
- }
-
- private ReflectionAIFunction(ReflectionAIFunctionDescriptor functionDescriptor, object? target, AIFunctionFactoryOptions options)
- {
- FunctionDescriptor = functionDescriptor;
- Target = target;
- AdditionalProperties = options.AdditionalProperties ?? EmptyReadOnlyDictionary.Instance;
- }
-
- public ReflectionAIFunctionDescriptor FunctionDescriptor { get; }
- public object? Target { get; }
- public override IReadOnlyDictionary AdditionalProperties { get; }
- public override string Name => FunctionDescriptor.Name;
- public override string Description => FunctionDescriptor.Description;
- public override MethodInfo UnderlyingMethod => FunctionDescriptor.Method;
- public override JsonElement JsonSchema => FunctionDescriptor.JsonSchema;
- public override JsonSerializerOptions JsonSerializerOptions => FunctionDescriptor.JsonSerializerOptions;
- protected override Task