.Net: AutoFunctionInvocation IChatClient Support #11536

Merged
Changes from all commits (27 commits)
fa1776f
AddChatClient OpenAI WIP
RogerBarreto Apr 1, 2025
69ff2c2
Adding UT and Extension Methods
RogerBarreto Apr 3, 2025
dbd0aab
Function Call impl
RogerBarreto Apr 3, 2025
9c892e1
Auto vs KernelFIC
RogerBarreto Apr 4, 2025
be5583e
AutoFunctionInvocationContext as KFIC
RogerBarreto Apr 4, 2025
8663ab0
AutoFunctionInvocation WIP
RogerBarreto Apr 7, 2025
704c80b
AutoContext losing result
RogerBarreto Apr 8, 2025
e4ae494
FilterCanOverrideArguments
RogerBarreto Apr 9, 2025
7a5fc08
Fix failing UT + Aded PromptExecutionSettings to be propagated into A…
RogerBarreto Apr 9, 2025
11de110
Resolving UT for FilterCanHandleException
RogerBarreto Apr 9, 2025
85c8cc4
Adjust for HandleExceptionONStreaming
RogerBarreto Apr 9, 2025
ead8061
Added all behavior except Skipping
RogerBarreto Apr 9, 2025
df4b108
Fix warnings
RogerBarreto Apr 10, 2025
ffc01d9
AutoINvocation Skipping and ChatReducer passing
RogerBarreto Apr 11, 2025
f7ee7a2
Fix CallId optionality
RogerBarreto Apr 11, 2025
7344afd
Fix warnings
RogerBarreto Apr 11, 2025
de4aa18
Merge branch 'feature-msextensions-ai' of https://github.com/microsof…
RogerBarreto Apr 11, 2025
196fa98
Fix 9.4.0 conflicts and errors
RogerBarreto Apr 11, 2025
83c2e18
Internalizing components
RogerBarreto Apr 11, 2025
c3b89d8
Starting update of FunctionInvokingChatClient
RogerBarreto Apr 14, 2025
3d56b95
Using Extensions AI + Latest logic for FunctionINvokingChatClient
RogerBarreto Apr 14, 2025
6d7a374
Removing KernelFunctionINvocationContext in favor of Microsoft.Extens…
RogerBarreto Apr 14, 2025
7a38027
Fix reference
RogerBarreto Apr 14, 2025
c989c14
Typo fix
RogerBarreto Apr 14, 2025
4316651
Fix typos + virtual to private
RogerBarreto Apr 14, 2025
cb636e9
Address PR comments
RogerBarreto Apr 15, 2025
7043c72
Address PR feedback
RogerBarreto Apr 16, 2025
3 changes: 1 addition & 2 deletions dotnet/Directory.Packages.props
@@ -93,7 +93,6 @@
<PackageVersion Include="SharpToken" Version="2.0.3" />
<!-- Microsoft.Extensions.* -->
<PackageVersion Include="Microsoft.Extensions.AI" Version="9.4.0-preview.1.25207.5" />
<PackageVersion Include="Microsoft.Extensions.AI.Abstractions" Version="9.4.0-preview.1.25207.5" />
<PackageVersion Include="Microsoft.Extensions.AI.AzureAIInference" Version="9.4.0-preview.1.25207.5" />
<PackageVersion Include="Microsoft.Extensions.AI.OpenAI" Version="9.4.0-preview.1.25207.5" />
<PackageVersion Include="Microsoft.Extensions.Configuration" Version="8.0.0" />
@@ -109,7 +108,7 @@
<PackageVersion Include="Microsoft.Extensions.Http.Resilience" Version="8.9.1" />
<PackageVersion Include="Microsoft.Extensions.ServiceDiscovery" Version="9.0.0" />
<PackageVersion Include="Microsoft.Extensions.Logging" Version="8.0.1" />
<PackageVersion Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.2" />
<PackageVersion Include="Microsoft.Extensions.Logging.Abstractions" Version="8.0.3" />
<PackageVersion Include="Microsoft.Extensions.Logging.Console" Version="8.0.1" />
<PackageVersion Include="Microsoft.Extensions.Logging.Debug" Version="8.0.1" />
<PackageVersion Include="Microsoft.Extensions.Options.DataAnnotations" Version="8.0.0" />
@@ -109,11 +109,11 @@ private Kernel CreateKernelWithTwoServices(bool useChatClient)
{
builder.Services.AddKeyedChatClient(
ServiceKeyBad,
new OpenAI.OpenAIClient("bad-key").AsChatClient(TestConfiguration.OpenAI.ChatModelId));
new OpenAI.OpenAIClient("bad-key").GetChatClient(TestConfiguration.OpenAI.ChatModelId).AsIChatClient());

builder.Services.AddKeyedChatClient(
ServiceKeyGood,
new OpenAI.OpenAIClient(TestConfiguration.OpenAI.ApiKey).AsChatClient(TestConfiguration.OpenAI.ChatModelId));
new OpenAI.OpenAIClient(TestConfiguration.OpenAI.ApiKey).GetChatClient(TestConfiguration.OpenAI.ChatModelId).AsIChatClient());
}
else
{
@@ -122,14 +122,16 @@ private Kernel CreateKernelWithTwoServices(bool useChatClient)
new Azure.AI.OpenAI.AzureOpenAIClient(
new Uri(TestConfiguration.AzureOpenAI.Endpoint),
new Azure.AzureKeyCredential("bad-key"))
.AsChatClient(TestConfiguration.AzureOpenAI.ChatDeploymentName));
.GetChatClient(TestConfiguration.AzureOpenAI.ChatDeploymentName)
.AsIChatClient());

builder.Services.AddKeyedChatClient(
ServiceKeyGood,
new Azure.AI.OpenAI.AzureOpenAIClient(
new Uri(TestConfiguration.AzureOpenAI.Endpoint),
new Azure.AzureKeyCredential(TestConfiguration.AzureOpenAI.ApiKey))
.AsChatClient(TestConfiguration.AzureOpenAI.ChatDeploymentName));
.GetChatClient(TestConfiguration.AzureOpenAI.ChatDeploymentName)
.AsIChatClient());
}
}
else
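Note on the updated pattern: the hunks above replace the earlier AsChatClient(modelId) extension on the OpenAI/Azure OpenAI clients with first resolving the model-specific chat client and then adapting it. A minimal sketch of the new shape, assuming only the APIs used in the diff (the API key and model id are placeholders, not values from this PR):

using Microsoft.Extensions.AI; // brings the AsIChatClient extension into scope
using OpenAI;

// Resolve the model-specific OpenAI.Chat.ChatClient, then adapt it to Microsoft.Extensions.AI.IChatClient.
IChatClient chatClient = new OpenAIClient("<api-key>")
    .GetChatClient("<model-id>")
    .AsIChatClient();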
@@ -0,0 +1,167 @@
// Copyright (c) Microsoft. All rights reserved.

using Microsoft.Extensions.DependencyInjection;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.OpenAI;

namespace Filtering;

public class ChatClient_AutoFunctionInvocationFiltering(ITestOutputHelper output) : BaseTest(output)
{
/// <summary>
/// Shows how to use <see cref="IAutoFunctionInvocationFilter"/>.
/// </summary>
[Fact]
public async Task UsingAutoFunctionInvocationFilter()
{
var builder = Kernel.CreateBuilder();

builder.AddOpenAIChatClient("gpt-4", TestConfiguration.OpenAI.ApiKey);

// This filter outputs information about auto function invocation and returns overridden result.
builder.Services.AddSingleton<IAutoFunctionInvocationFilter>(new AutoFunctionInvocationFilter(this.Output));

var kernel = builder.Build();

var function = KernelFunctionFactory.CreateFromMethod(() => "Result from function", "MyFunction");

kernel.ImportPluginFromFunctions("MyPlugin", [function]);

var executionSettings = new OpenAIPromptExecutionSettings
{
FunctionChoiceBehavior = FunctionChoiceBehavior.Required([function], autoInvoke: true)
};

var result = await kernel.InvokePromptAsync("Invoke provided function and return result", new(executionSettings));

Console.WriteLine(result);

// Output:
// Request sequence index: 0
// Function sequence index: 0
// Total number of functions: 1
// Result from auto function invocation filter.
}

/// <summary>
/// Shows how to get list of function calls by using <see cref="IAutoFunctionInvocationFilter"/>.
/// </summary>
[Fact]
public async Task GetFunctionCallsWithFilterAsync()
{
var builder = Kernel.CreateBuilder();

builder.AddOpenAIChatCompletion("gpt-3.5-turbo-1106", TestConfiguration.OpenAI.ApiKey);

builder.Services.AddSingleton<IAutoFunctionInvocationFilter>(new FunctionCallsFilter(this.Output));

var kernel = builder.Build();

kernel.ImportPluginFromFunctions("HelperFunctions",
[
kernel.CreateFunctionFromMethod(() => DateTime.UtcNow.ToString("R"), "GetCurrentUtcTime", "Retrieves the current time in UTC."),
kernel.CreateFunctionFromMethod((string cityName) =>
cityName switch
{
"Boston" => "61 and rainy",
"London" => "55 and cloudy",
"Miami" => "80 and sunny",
"Paris" => "60 and rainy",
"Tokyo" => "50 and sunny",
"Sydney" => "75 and sunny",
"Tel Aviv" => "80 and sunny",
_ => "31 and snowing",
}, "GetWeatherForCity", "Gets the current weather for the specified city"),
]);

var executionSettings = new OpenAIPromptExecutionSettings
{
FunctionChoiceBehavior = FunctionChoiceBehavior.Auto()
};

await foreach (var chunk in kernel.InvokePromptStreamingAsync("Check current UTC time and return current weather in Boston city.", new(executionSettings)))
{
Console.WriteLine(chunk.ToString());
}

// Output:
// Request #0. Function call: HelperFunctions.GetCurrentUtcTime.
// Request #0. Function call: HelperFunctions.GetWeatherForCity.
// The current UTC time is {time of execution}, and the current weather in Boston is 61°F and rainy.
}

/// <summary>Shows available syntax for auto function invocation filter.</summary>
private sealed class AutoFunctionInvocationFilter(ITestOutputHelper output) : IAutoFunctionInvocationFilter
{
public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func<AutoFunctionInvocationContext, Task> next)
{
// Example: get function information
var functionName = context.Function.Name;

// Example: get chat history
var chatHistory = context.ChatHistory;

// Example: get information about all functions which will be invoked
var functionCalls = FunctionCallContent.GetFunctionCalls(context.ChatHistory.Last());

// Function calling involves two loops.
// The outer loop is the "request" loop - it performs multiple requests to the LLM until the user's ask is satisfied.
// The inner loop is the "function" loop - it handles an LLM response that contains multiple function calls.

// Workflow example:
// 1. Request to LLM #1 -> Response with 3 functions to call.
// 1.1. Function #1 called.
// 1.2. Function #2 called.
// 1.3. Function #3 called.
// 2. Request to LLM #2 -> Response with 2 functions to call.
// 2.1. Function #1 called.
// 2.2. Function #2 called.

// context.RequestSequenceIndex - the sequence number of the outer/request loop iteration.
// context.FunctionSequenceIndex - the sequence number of the inner/function loop iteration.
// context.FunctionCount - the number of functions to be called for the current request (based on the example above: 3 for the first request, 2 for the second).

// Example: get request sequence index
output.WriteLine($"Request sequence index: {context.RequestSequenceIndex}");

// Example: get function sequence index
output.WriteLine($"Function sequence index: {context.FunctionSequenceIndex}");

// Example: get total number of functions which will be called
output.WriteLine($"Total number of functions: {context.FunctionCount}");

// Calling the next filter in the pipeline or, if this is the last filter, the function itself.
// If this call is skipped, the remaining filters and the function won't be invoked, and the function calling loop will proceed to the next function.
await next(context);

// Example: get function result
var result = context.Result;

// Example: override function result value
context.Result = new FunctionResult(context.Result, "Result from auto function invocation filter");

// Example: Terminate function invocation
context.Terminate = true;
}
}

/// <summary>Shows how to get list of all function calls per request.</summary>
private sealed class FunctionCallsFilter(ITestOutputHelper output) : IAutoFunctionInvocationFilter
{
public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func<AutoFunctionInvocationContext, Task> next)
{
var chatHistory = context.ChatHistory;
var functionCalls = FunctionCallContent.GetFunctionCalls(chatHistory.Last()).ToArray();

if (functionCalls is { Length: > 0 })
{
foreach (var functionCall in functionCalls)
{
output.WriteLine($"Request #{context.RequestSequenceIndex}. Function call: {functionCall.PluginName}.{functionCall.FunctionName}.");
}
}

await next(context);
}
}
}
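As a companion to the filter above: the comments around next(context) describe two ways to cut the pipeline short - not calling next skips the remaining filters and the function itself, while setting Terminate stops the auto-invocation loop. A minimal sketch of a filter body that does both, using only the context members shown in this sample (the override message is illustrative):

public Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func<AutoFunctionInvocationContext, Task> next)
{
    // next(context) is intentionally not called: the remaining filters and the function itself are skipped.
    // Provide a result for the skipped call instead of the real function output.
    context.Result = new FunctionResult(context.Result, "Function invocation was skipped by the filter");

    // Stop the automatic function calling loop after this call is processed.
    context.Terminate = true;

    return Task.CompletedTask;
}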
8 changes: 4 additions & 4 deletions dotnet/samples/Concepts/Kernel/CustomAIServiceSelector.cs
@@ -10,7 +10,7 @@
namespace KernelExamples;

/// <summary>
/// This sample shows how to use a custom AI service selector to select a specific model by matching it's id.
/// This sample shows how to use a custom AI service selector to select a specific model by matching the model id.
/// </summary>
public class CustomAIServiceSelector(ITestOutputHelper output) : BaseTest(output)
{
@@ -39,7 +39,8 @@ public async Task UsingCustomSelectToSelectServiceByMatchingModelId()
builder.Services
.AddSingleton<IAIServiceSelector>(customSelector)
.AddKeyedChatClient("OpenAIChatClient", new OpenAI.OpenAIClient(TestConfiguration.OpenAI.ApiKey)
.AsChatClient("gpt-4o")); // Add a IChatClient to the kernel
.GetChatClient("gpt-4o")
.AsIChatClient()); // Add a IChatClient to the kernel

Kernel kernel = builder.Build();

@@ -60,7 +61,6 @@ private sealed class GptAIServiceSelector(string modelNameStartsWith, ITestOutpu
private readonly ITestOutputHelper _output = output;
private readonly string _modelNameStartsWith = modelNameStartsWith;

/// <inheritdoc/>
private bool TrySelect<T>(
Kernel kernel, KernelFunction function, KernelArguments arguments,
[NotNullWhen(true)] out T? service, out PromptExecutionSettings? serviceSettings) where T : class
@@ -78,7 +78,7 @@ private bool TrySelect<T>(
else if (serviceToCheck is IChatClient chatClient)
{
var metadata = chatClient.GetService<ChatClientMetadata>();
serviceModelId = metadata?.ModelId;
serviceModelId = metadata?.DefaultModelId;
endpoint = metadata?.ProviderUri?.ToString();
}

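The selector change above tracks the ChatClientMetadata property rename from ModelId to DefaultModelId; reading a client's advertised defaults follows the same pattern as in the hunk (variable names here are illustrative):

// Sketch: inspect an IChatClient's metadata before deciding whether to select it.
var metadata = chatClient.GetService<ChatClientMetadata>();
string? serviceModelId = metadata?.DefaultModelId;     // e.g. "gpt-4o"
string? endpoint = metadata?.ProviderUri?.ToString();  // provider endpoint, if any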
@@ -43,25 +43,25 @@ public async Task UseDependencyInjectionToCreateAgentAsync(bool useChatClient)
IChatClient chatClient;
if (this.UseOpenAIConfig)
{
chatClient = new Microsoft.Extensions.AI.OpenAIChatClient(
new OpenAI.OpenAIClient(TestConfiguration.OpenAI.ApiKey),
TestConfiguration.OpenAI.ChatModelId);
chatClient = new OpenAI.OpenAIClient(TestConfiguration.OpenAI.ApiKey)
.GetChatClient(TestConfiguration.OpenAI.ChatModelId)
.AsIChatClient();
}
else if (!string.IsNullOrEmpty(this.ApiKey))
{
chatClient = new Microsoft.Extensions.AI.OpenAIChatClient(
openAIClient: new AzureOpenAIClient(
chatClient = new AzureOpenAIClient(
endpoint: new Uri(TestConfiguration.AzureOpenAI.Endpoint),
credential: new ApiKeyCredential(TestConfiguration.AzureOpenAI.ApiKey)),
modelId: TestConfiguration.AzureOpenAI.ChatModelId);
credential: new ApiKeyCredential(TestConfiguration.AzureOpenAI.ApiKey))
.GetChatClient(TestConfiguration.OpenAI.ChatModelId)
.AsIChatClient();
}
else
{
chatClient = new Microsoft.Extensions.AI.OpenAIChatClient(
openAIClient: new AzureOpenAIClient(
chatClient = new AzureOpenAIClient(
endpoint: new Uri(TestConfiguration.AzureOpenAI.Endpoint),
credential: new AzureCliCredential()),
modelId: TestConfiguration.AzureOpenAI.ChatModelId);
credential: new AzureCliCredential())
.GetChatClient(TestConfiguration.OpenAI.ChatModelId)
.AsIChatClient();
}

var functionCallingChatClient = chatClient!.AsKernelFunctionInvokingChatClient();
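The hunk above ends by wrapping the adapted client with AsKernelFunctionInvokingChatClient(), the decorator this PR adds, presumably so that kernel functions exposed as tools are invoked automatically. Assuming that extension and the adapters shown above, the wiring can be sketched as follows (endpoint, credential, and deployment name are placeholders):

// Sketch: adapt an Azure OpenAI chat client and wrap it for automatic kernel function invocation.
IChatClient chatClient = new AzureOpenAIClient(
        new Uri("https://<resource>.openai.azure.com"),
        new AzureCliCredential())
    .GetChatClient("<deployment-name>")
    .AsIChatClient();

var functionCallingChatClient = chatClient.AsKernelFunctionInvokingChatClient();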
@@ -96,6 +96,15 @@
<None Update="TestData\text-to-image-response.json">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="TestData\filters_chatclient_multiple_function_calls_test_response.json">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="TestData\chat_completion_streaming_chatclient_multiple_function_calls_test_response.txt">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
<None Update="TestData\filters_chatclient_streaming_multiple_function_calls_test_response.txt">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</None>
</ItemGroup>

</Project>