.Net: Address some additional review feedback #6289

Merged
@@ -58,10 +58,10 @@ public async Task GetStreamingChatMessageContentsAsync()
     [Fact]
     public async Task ChatPromptAsync()
     {
-        const string ChatPrompt = @"
-            <message role=""system"">Respond in French.</message>
-            <message role=""user"">What is the best French cheese?</message>
-            ";
+        const string ChatPrompt = """
+            <message role="system">Respond in French.</message>
+            <message role="user">What is the best French cheese?</message>
+            """;

         var kernel = Kernel.CreateBuilder()
             .AddMistralChatCompletion(
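
The hunk above is representative of a change repeated throughout this PR: verbatim strings (`@"..."`) are replaced with C# 11 raw string literals (`"""..."""`). A minimal sketch of the difference, not taken from the PR itself:

```csharp
// Verbatim strings must escape embedded quotes by doubling them, which
// obscures the prompt XML; raw string literals need no escaping at all.
const string Verbatim = @"<message role=""user"">Hi</message>";
const string Raw = """<message role="user">Hi</message>""";

Console.WriteLine(Verbatim == Raw); // True — both encode the same text
```

Raw string literals also strip the indentation of the closing quotes from every line, so multi-line prompts can be indented with the surrounding code without leaking leading whitespace into the payload.
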
dotnet/samples/Concepts/ChatCompletion/MistralAI_FunctionCalling.cs — 111 changes (39 additions, 72 deletions)
@@ -17,23 +17,13 @@ public sealed class MistralAI_FunctionCalling(ITestOutputHelper output) : BaseTe
     [Fact]
     public async Task AutoInvokeKernelFunctionsAsync()
     {
-        // Create a logging handler to output HTTP requests and responses
-        var handler = new LoggingHandler(new HttpClientHandler(), this.Output);
-        HttpClient httpClient = new(handler);
-
-        // Create a kernel with MistralAI chat completion and WeatherPlugin
-        IKernelBuilder kernelBuilder = Kernel.CreateBuilder();
-        kernelBuilder.AddMistralChatCompletion(
-            modelId: TestConfiguration.MistralAI.ChatModelId!,
-            apiKey: TestConfiguration.MistralAI.ApiKey!,
-            httpClient: httpClient);
-        kernelBuilder.Plugins.AddFromType<WeatherPlugin>();
-        Kernel kernel = kernelBuilder.Build();
+        Kernel kernel = this.CreateKernelWithWeatherPlugin();

         // Invoke chat prompt with auto invocation of functions enabled
-        const string ChatPrompt = @"
-            <message role=""user"">What is the weather like in Paris?</message>
-            ";
+        const string ChatPrompt = """
+            <message role="user">What is the weather like in Paris?</message>
+            """;
         var executionSettings = new MistralAIPromptExecutionSettings { ToolCallBehavior = MistralAIToolCallBehavior.AutoInvokeKernelFunctions };
         var chatSemanticFunction = kernel.CreateFunctionFromPrompt(
             ChatPrompt, executionSettings);
@@ -45,18 +35,8 @@ public async Task AutoInvokeKernelFunctionsAsync()
     [Fact]
     public async Task AutoInvokeKernelFunctionsMultipleCallsAsync()
     {
-        // Create a logging handler to output HTTP requests and responses
-        var handler = new LoggingHandler(new HttpClientHandler(), this.Output);
-        HttpClient httpClient = new(handler);
-
-        // Create a kernel with MistralAI chat completion and WeatherPlugin
-        IKernelBuilder kernelBuilder = Kernel.CreateBuilder();
-        kernelBuilder.AddMistralChatCompletion(
-            modelId: TestConfiguration.MistralAI.ChatModelId!,
-            apiKey: TestConfiguration.MistralAI.ApiKey!,
-            httpClient: httpClient);
-        kernelBuilder.Plugins.AddFromType<WeatherPlugin>();
-        Kernel kernel = kernelBuilder.Build();
+        Kernel kernel = this.CreateKernelWithWeatherPlugin();
         var service = kernel.GetRequiredService<IChatCompletionService>();

         // Invoke chat prompt with auto invocation of functions enabled
@@ -65,37 +45,27 @@ public async Task AutoInvokeKernelFunctionsMultipleCallsAsync()
             new ChatMessageContent(AuthorRole.User, "What is the weather like in Paris?")
         };
         var executionSettings = new MistralAIPromptExecutionSettings { ToolCallBehavior = MistralAIToolCallBehavior.AutoInvokeKernelFunctions };
-        var result1 = await service.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel);
-        chatHistory.AddRange(result1);
+        var chatPromptResult1 = await service.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel);
+        chatHistory.AddRange(chatPromptResult1);

         chatHistory.Add(new ChatMessageContent(AuthorRole.User, "What is the weather like in Marseille?"));
-        var result2 = await service.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel);
+        var chatPromptResult2 = await service.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel);

-        Console.WriteLine(result1[0].Content);
-        Console.WriteLine(result2[0].Content);
+        Console.WriteLine(chatPromptResult1[0].Content);
+        Console.WriteLine(chatPromptResult2[0].Content);
     }

     [Fact]
     public async Task RequiredKernelFunctionsAsync()
     {
-        // Create a logging handler to output HTTP requests and responses
-        var handler = new LoggingHandler(new HttpClientHandler(), this.Output);
-        HttpClient httpClient = new(handler);
-
-        // Create a kernel with MistralAI chat completion and WeatherPlugin
-        IKernelBuilder kernelBuilder = Kernel.CreateBuilder();
-        kernelBuilder.AddMistralChatCompletion(
-            modelId: TestConfiguration.MistralAI.ChatModelId!,
-            apiKey: TestConfiguration.MistralAI.ApiKey!,
-            httpClient: httpClient);
-        kernelBuilder.Plugins.AddFromType<WeatherPlugin>();
-        Kernel kernel = kernelBuilder.Build();
+        Kernel kernel = this.CreateKernelWithWeatherPlugin();
         var plugin = kernel.Plugins.First();

         // Invoke chat prompt with auto invocation of functions enabled
-        const string ChatPrompt = @"
-            <message role=""user"">What is the weather like in Paris?</message>
-            ";
+        const string ChatPrompt = """
+            <message role="user">What is the weather like in Paris?</message>
+            """;
         var executionSettings = new MistralAIPromptExecutionSettings
         {
             ToolCallBehavior = MistralAIToolCallBehavior.RequiredFunctions(plugin, true)
@@ -110,23 +80,13 @@ public async Task RequiredKernelFunctionsAsync()
     [Fact]
     public async Task NoKernelFunctionsAsync()
     {
-        // Create a logging handler to output HTTP requests and responses
-        var handler = new LoggingHandler(new HttpClientHandler(), this.Output);
-        HttpClient httpClient = new(handler);
-
-        // Create a kernel with MistralAI chat completion and WeatherPlugin
-        IKernelBuilder kernelBuilder = Kernel.CreateBuilder();
-        kernelBuilder.AddMistralChatCompletion(
-            modelId: TestConfiguration.MistralAI.ChatModelId!,
-            apiKey: TestConfiguration.MistralAI.ApiKey!,
-            httpClient: httpClient);
-        kernelBuilder.Plugins.AddFromType<WeatherPlugin>();
-        Kernel kernel = kernelBuilder.Build();
+        Kernel kernel = this.CreateKernelWithWeatherPlugin();

         // Invoke chat prompt with auto invocation of functions enabled
-        const string ChatPrompt = @"
-            <message role=""user"">What is the weather like in Paris?</message>
-            ";
+        const string ChatPrompt = """
+            <message role="user">What is the weather like in Paris?</message>
+            """;
         var executionSettings = new MistralAIPromptExecutionSettings
         {
             ToolCallBehavior = MistralAIToolCallBehavior.NoKernelFunctions
@@ -141,19 +101,9 @@ public async Task NoKernelFunctionsAsync()
     [Fact]
     public async Task AutoInvokeKernelFunctionsMultiplePluginsAsync()
    {
-        // Create a logging handler to output HTTP requests and responses
-        var handler = new LoggingHandler(new HttpClientHandler(), this.Output);
-        HttpClient httpClient = new(handler);
-
-        // Create a kernel with MistralAI chat completion and WeatherPlugin
-        IKernelBuilder kernelBuilder = Kernel.CreateBuilder();
-        kernelBuilder.AddMistralChatCompletion(
-            modelId: TestConfiguration.MistralAI.ChatModelId!,
-            apiKey: TestConfiguration.MistralAI.ApiKey!,
-            httpClient: httpClient);
-        kernelBuilder.Plugins.AddFromType<WeatherPlugin>();
-        kernelBuilder.Plugins.AddFromType<WidgetFactory>();
-        Kernel kernel = kernelBuilder.Build();
+        // Create a kernel with MistralAI chat completion, WeatherPlugin, and WidgetPlugin
+        Kernel kernel = this.CreateKernelWithWeatherPlugin();
+        kernel.Plugins.AddFromType<WidgetPlugin>();

         // Invoke chat prompt with auto invocation of functions enabled
         const string ChatPrompt = """
@@ -176,7 +126,7 @@ public string GetWeather(
         ) => "12°C\nWind: 11 KMPH\nHumidity: 48%\nMostly cloudy";
     }

-    public sealed class WidgetFactory
+    public sealed class WidgetPlugin
     {
         [KernelFunction]
         [Description("Creates a new widget of the specified type and colors")]
@@ -199,4 +149,21 @@ public enum WidgetColor
         [Description("Use when creating a blue item.")]
         Blue
     }
+
+    private Kernel CreateKernelWithWeatherPlugin()
+    {
+        // Create a logging handler to output HTTP requests and responses
+        var handler = new LoggingHandler(new HttpClientHandler(), this.Output);
+        HttpClient httpClient = new(handler);
+
+        // Create a kernel with MistralAI chat completion and WeatherPlugin
+        IKernelBuilder kernelBuilder = Kernel.CreateBuilder();
+        kernelBuilder.AddMistralChatCompletion(
+            modelId: TestConfiguration.MistralAI.ChatModelId!,
+            apiKey: TestConfiguration.MistralAI.ApiKey!,
+            httpClient: httpClient);
+        kernelBuilder.Plugins.AddFromType<WeatherPlugin>();
+        Kernel kernel = kernelBuilder.Build();
+        return kernel;
+    }
 }
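
Between them, the tests above exercise all three tool-call behaviors the MistralAI connector settings expose. A condensed sketch for comparison — not from the PR, and the meaning of the boolean passed to `RequiredFunctions` is inferred from the sample's usage:

```csharp
// Contrast of the three MistralAIToolCallBehavior options used above.
// Assumes `kernel` was built by CreateKernelWithWeatherPlugin().

// Advertise every kernel function and invoke whatever the model calls.
var auto = new MistralAIPromptExecutionSettings
{
    ToolCallBehavior = MistralAIToolCallBehavior.AutoInvokeKernelFunctions
};

// Require the model to call a function from one plugin; the second argument
// (true in RequiredKernelFunctionsAsync) presumably opts in to auto-invocation.
var required = new MistralAIPromptExecutionSettings
{
    ToolCallBehavior = MistralAIToolCallBehavior.RequiredFunctions(kernel.Plugins.First(), true)
};

// Advertise no functions at all; the model must answer without tools.
var none = new MistralAIPromptExecutionSettings
{
    ToolCallBehavior = MistralAIToolCallBehavior.NoKernelFunctions
};
```
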
dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCalling.cs — 51 changes (23 additions, 28 deletions)
@@ -11,25 +11,13 @@ public sealed class OpenAI_FunctionCalling(ITestOutputHelper output) : BaseTest(
     [Fact]
     public async Task AutoInvokeKernelFunctionsAsync()
     {
-        // Create a logging handler to output HTTP requests and responses
-        var handler = new LoggingHandler(new HttpClientHandler(), this.Output);
-        HttpClient httpClient = new(handler);
-
-        OpenAIChatCompletionService chatCompletionService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey);
-
-        // Create a kernel with OpenAI chat completion and WeatherPlugin
-        IKernelBuilder kernelBuilder = Kernel.CreateBuilder();
-        kernelBuilder.AddOpenAIChatCompletion(
-            modelId: TestConfiguration.OpenAI.ChatModelId!,
-            apiKey: TestConfiguration.OpenAI.ApiKey!,
-            httpClient: httpClient);
-        kernelBuilder.Plugins.AddFromType<WeatherPlugin>();
-        Kernel kernel = kernelBuilder.Build();
+        // Create a kernel with OpenAI chat completion and WeatherPlugin
+        Kernel kernel = CreateKernelWithWeatherPlugin();

         // Invoke chat prompt with auto invocation of functions enabled
-        const string ChatPrompt = @"
-            <message role=""user"">What is the weather like in Paris?</message>
-            ";
+        const string ChatPrompt = """
+            <message role="user">What is the weather like in Paris?</message>
+            """;
         var executionSettings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
         var chatSemanticFunction = kernel.CreateFunctionFromPrompt(
             ChatPrompt, executionSettings);
@@ -41,18 +29,8 @@ public async Task AutoInvokeKernelFunctionsAsync()
     [Fact]
     public async Task AutoInvokeKernelFunctionsMultipleCallsAsync()
     {
-        // Create a logging handler to output HTTP requests and responses
-        var handler = new LoggingHandler(new HttpClientHandler(), this.Output);
-        HttpClient httpClient = new(handler);
-
-        // Create a kernel with MistralAI chat completion and WeatherPlugin
-        IKernelBuilder kernelBuilder = Kernel.CreateBuilder();
-        kernelBuilder.AddOpenAIChatCompletion(
-            modelId: TestConfiguration.OpenAI.ChatModelId!,
-            apiKey: TestConfiguration.OpenAI.ApiKey!,
-            httpClient: httpClient);
-        kernelBuilder.Plugins.AddFromType<WeatherPlugin>();
-        Kernel kernel = kernelBuilder.Build();
+        Kernel kernel = CreateKernelWithWeatherPlugin();
         var service = kernel.GetRequiredService<IChatCompletionService>();

         // Invoke chat prompt with auto invocation of functions enabled
@@ -79,4 +57,21 @@ public string GetWeather(
         [Description("The city and department, e.g. Marseille, 13")] string location
     ) => "12°C\nWind: 11 KMPH\nHumidity: 48%\nMostly cloudy";
     }
+
+    private Kernel CreateKernelWithWeatherPlugin()
+    {
+        // Create a logging handler to output HTTP requests and responses
+        var handler = new LoggingHandler(new HttpClientHandler(), this.Output);
+        HttpClient httpClient = new(handler);
+
+        // Create a kernel with OpenAI chat completion and WeatherPlugin
+        IKernelBuilder kernelBuilder = Kernel.CreateBuilder();
+        kernelBuilder.AddOpenAIChatCompletion(
+            modelId: TestConfiguration.OpenAI.ChatModelId!,
+            apiKey: TestConfiguration.OpenAI.ApiKey!,
+            httpClient: httpClient);
+        kernelBuilder.Plugins.AddFromType<WeatherPlugin>();
+        Kernel kernel = kernelBuilder.Build();
+        return kernel;
+    }
 }
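
Both refactored sample classes depend on a `LoggingHandler` whose implementation is not part of this diff. A hypothetical sketch of what such a handler typically looks like — a standard `DelegatingHandler` that echoes traffic to the xUnit output — is shown below; the real helper in the repository may differ:

```csharp
using Xunit.Abstractions;

// Hypothetical sketch — the actual LoggingHandler lives elsewhere in the repo.
public sealed class LoggingHandler(HttpMessageHandler innerHandler, ITestOutputHelper output)
    : DelegatingHandler(innerHandler)
{
    protected override async Task<HttpResponseMessage> SendAsync(
        HttpRequestMessage request, CancellationToken cancellationToken)
    {
        // Echo the outgoing request line and body to the test output
        output.WriteLine($"{request.Method} {request.RequestUri}");
        if (request.Content is not null)
        {
            output.WriteLine(await request.Content.ReadAsStringAsync(cancellationToken));
        }

        HttpResponseMessage response = await base.SendAsync(request, cancellationToken);

        // Echo the response body as well
        output.WriteLine(await response.Content.ReadAsStringAsync(cancellationToken));
        return response;
    }
}
```
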
dotnet/samples/Concepts/README.md — 4 changes (4 additions, 0 deletions)
@@ -49,6 +49,10 @@ Down below you can find the code snippets that demonstrate the usage of many Sem
 - [OpenAI_ChatCompletionWithVision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionWithVision.cs)
 - [OpenAI_CustomAzureOpenAIClient](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomAzureOpenAIClient.cs)
 - [OpenAI_UsingLogitBias](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_UsingLogitBias.cs)
+- [OpenAI_FunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/OpenAI_FunctionCalling.cs)
+- [MistralAI_ChatPrompt](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_ChatPrompt.cs)
+- [MistralAI_FunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_FunctionCalling.cs)
+- [MistralAI_StreamingFunctionCalling](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/ChatCompletion/MistralAI_StreamingFunctionCalling.cs)

 ## DependencyInjection - Examples on using `DI Container`
@@ -3,7 +3,7 @@
   <PropertyGroup>
     <AssemblyName>SemanticKernel.Connectors.MistralAI.UnitTests</AssemblyName>
     <RootNamespace>SemanticKernel.Connectors.MistralAI.UnitTests</RootNamespace>
-    <TargetFramework>net6.0</TargetFramework>
+    <TargetFramework>net8.0</TargetFramework>
     <LangVersion>12</LangVersion>
     <RollForward>LatestMajor</RollForward>
     <IsTestProject>true</IsTestProject>
@@ -23,7 +23,7 @@ public async Task ValidateGenerateEmbeddingsAsync()
     var service = new MistralAITextEmbeddingGenerationService("mistral-small-latest", "key", httpClient: this.HttpClient);

     // Act
-    List<string> data = new() { "Hello", "world" };
+    List<string> data = ["Hello", "world"];
     var response = await service.GenerateEmbeddingsAsync(data, default);

     // Assert
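
The `["Hello", "world"]` form is a C# 12 collection expression, which the test project can use given the `<LangVersion>12</LangVersion>` setting in the csproj above. A brief illustrative sketch, not from the PR:

```csharp
// C# 12 collection expressions: terser than target-typed `new`,
// and the spread element composes existing sequences.
List<string> data = ["Hello", "world"];   // was: new() { "Hello", "world" }
string[] more = ["bonjour", "monde"];
List<string> all = [.. data, .. more];    // ["Hello", "world", "bonjour", "monde"]
```
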
@@ -526,7 +526,7 @@ private void ValidateChatHistory(ChatHistory chatHistory)
     var firstRole = chatHistory[0].Role.ToString();
     if (firstRole is not "system" && firstRole is not "user")
     {
-        throw new ArgumentException("First message in chat history should have system or user role", nameof(chatHistory));
+        throw new ArgumentException("The first message in chat history must have either the system or user role", nameof(chatHistory));
     }
 }

@@ -817,7 +817,7 @@ private void AddResponseMessage(ChatCompletionRequest chatRequest, ChatHistory c

 private static Dictionary<string, object?> GetChatChoiceMetadata(MistralChatCompletionChunk completionChunk, MistralChatCompletionChoice chatChoice)
 {
-    return new Dictionary<string, object?>(6)
+    return new Dictionary<string, object?>(7)
     {
         { nameof(completionChunk.Id), completionChunk.Id },
         { nameof(completionChunk.Object), completionChunk.Object },
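
The 6 → 7 change above adjusts the dictionary's initial capacity, presumably because it fell out of sync with the number of metadata entries. The constructor argument is only a sizing hint, so the old code was correct but paid for one resize; a small sketch of the effect, not from the PR:

```csharp
// The constructor argument pre-sizes the dictionary's internal buckets.
// Under-sizing is harmless but triggers a rehash/resize once it fills.
var metadata = new Dictionary<string, object?>(7);
for (int i = 0; i < 7; i++)
{
    metadata[$"key{i}"] = i; // with capacity 7, no resize occurs here
}
```
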
@@ -34,7 +34,8 @@ public static IKernelBuilder AddMistralChatCompletion(
         HttpClient? httpClient = null)
     {
         Verify.NotNull(builder);
-        Verify.NotNull(modelId);
+        Verify.NotNullOrWhiteSpace(modelId);
+        Verify.NotNullOrWhiteSpace(apiKey);

         builder.Services.AddKeyedSingleton<IChatCompletionService>(serviceId, (serviceProvider, _) =>
             new MistralAIChatCompletionService(modelId, apiKey, endpoint, HttpClientProvider.GetHttpClient(httpClient, serviceProvider)));

@@ -61,7 +62,6 @@ public static IKernelBuilder AddMistralTextEmbeddingGeneration(
         HttpClient? httpClient = null)
     {
         Verify.NotNull(builder);
-        Verify.NotNull(modelId);

         builder.Services.AddKeyedSingleton<ITextEmbeddingGenerationService>(serviceId, (serviceProvider, _) =>
             new MistralAITextEmbeddingGenerationService(modelId, apiKey, endpoint, HttpClientProvider.GetHttpClient(httpClient, serviceProvider)));
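
`Verify` is an internal Semantic Kernel guard helper whose implementation is not shown in this diff. A hypothetical sketch of the stricter check the PR switches to — `NotNullOrWhiteSpace` rejects empty and whitespace-only strings that `NotNull` would let through to the service constructor:

```csharp
using System.Diagnostics.CodeAnalysis;
using System.Runtime.CompilerServices;

// Hypothetical sketch — the real Verify helper lives elsewhere in the repo.
internal static class Verify
{
    internal static void NotNullOrWhiteSpace(
        [NotNull] string? value,
        [CallerArgumentExpression(nameof(value))] string? paramName = null)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            throw new ArgumentException(
                "The value cannot be null, empty, or composed entirely of whitespace.",
                paramName);
        }
    }
}
```
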
@@ -18,45 +18,43 @@ public static class MistralAIServiceCollectionExtensions
     /// Adds a Mistral chat completion service with the specified configuration.
     /// </summary>
     /// <param name="services">The <see cref="IServiceCollection"/> instance to augment.</param>
-    /// <param name="model">The name of the Mistral model.</param>
+    /// <param name="modelId">The name of the Mistral model.</param>
     /// <param name="apiKey">The API key required for accessing the Mistral service.</param>
     /// <param name="endpoint">Optional uri endpoint including the port where MistralAI server is hosted. Default is https://api.mistral.ai.</param>
     /// <param name="serviceId">A local identifier for the given AI service.</param>
     /// <returns>The same instance as <paramref name="services"/>.</returns>
     public static IServiceCollection AddMistralChatCompletion(
         this IServiceCollection services,
-        string model,
+        string modelId,
         string apiKey,
         Uri? endpoint = null,
         string? serviceId = null)
     {
         Verify.NotNull(services);
-        Verify.NotNull(model);

         return services.AddKeyedSingleton<IChatCompletionService>(serviceId, (serviceProvider, _) =>
-            new MistralAIChatCompletionService(model, apiKey, endpoint, HttpClientProvider.GetHttpClient(serviceProvider)));
+            new MistralAIChatCompletionService(modelId, apiKey, endpoint, HttpClientProvider.GetHttpClient(serviceProvider)));
     }

     /// <summary>
     /// Adds a Mistral text embedding generation service with the specified configuration.
     /// </summary>
     /// <param name="services">The <see cref="IServiceCollection"/> instance to augment.</param>
-    /// <param name="model">The name of theMistral model.</param>
+    /// <param name="modelId">The name of the Mistral model.</param>
     /// <param name="apiKey">The API key required for accessing the Mistral service.</param>
     /// <param name="endpoint">Optional uri endpoint including the port where MistralAI server is hosted. Default is https://api.mistral.ai.</param>
     /// <param name="serviceId">A local identifier for the given AI service.</param>
     /// <returns>The same instance as <paramref name="services"/>.</returns>
     public static IServiceCollection AddMistralTextEmbeddingGeneration(
         this IServiceCollection services,
-        string model,
+        string modelId,
         string apiKey,
         Uri? endpoint = null,
         string? serviceId = null)
     {
         Verify.NotNull(services);
-        Verify.NotNull(model);

         return services.AddKeyedSingleton<ITextEmbeddingGenerationService>(serviceId, (serviceProvider, _) =>
-            new MistralAITextEmbeddingGenerationService(model, apiKey, endpoint, HttpClientProvider.GetHttpClient(serviceProvider)));
+            new MistralAITextEmbeddingGenerationService(modelId, apiKey, endpoint, HttpClientProvider.GetHttpClient(serviceProvider)));
     }
 }
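
With the rename in place, call sites pass `modelId` instead of `model`. A short usage sketch — the model ID and key below are placeholders, and the `Microsoft.SemanticKernel` namespace for the extension class is assumed:

```csharp
using Microsoft.Extensions.DependencyInjection;
using Microsoft.SemanticKernel;

var services = new ServiceCollection();
services.AddMistralChatCompletion(
    modelId: "mistral-small-latest", // parameter was previously named `model`
    apiKey: "YOUR_API_KEY");         // placeholder — supply a real key
```
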