.Net Simplify configuration by ServiceId on Multi Model Scenarios. #6416

Merged

Changes from all commits (22 commits)
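This PR adds a ServiceId property to PromptExecutionSettings so that multi-model scenarios can target a specific AI service directly on the settings instance, instead of building a settings dictionary keyed by service id. A minimal before/after sketch distilled from the sample diff below (the "AzureOpenAIChat" id comes from the sample; assumes a kernel with that service registered):

// Before: route to a service by keying a settings dictionary with its service id.
KernelArguments arguments = [];
arguments.ExecutionSettings = new Dictionary<string, PromptExecutionSettings>
{
    { "AzureOpenAIChat", new PromptExecutionSettings() }
};
var before = await kernel.InvokePromptAsync("Hello AI, what can you do for me?", arguments);

// After: set the service id directly on the settings instance.
var after = await kernel.InvokePromptAsync(
    "Hello AI, what can you do for me?",
    new(new PromptExecutionSettings { ServiceId = "AzureOpenAIChat" }));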
197 changes: 150 additions & 47 deletions dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs
@@ -1,82 +1,185 @@
// Copyright (c) Microsoft. All rights reserved.

using Microsoft.SemanticKernel;

namespace ChatCompletion;

public class Connectors_WithMultipleLLMs(ITestOutputHelper output) : BaseTest(output)
{
    private const string ChatPrompt = "Hello AI, what can you do for me?";

    private static Kernel BuildKernel()
    {
        return Kernel.CreateBuilder()
            .AddAzureOpenAIChatCompletion(
                deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
                endpoint: TestConfiguration.AzureOpenAI.Endpoint,
                apiKey: TestConfiguration.AzureOpenAI.ApiKey,
                serviceId: "AzureOpenAIChat",
                modelId: TestConfiguration.AzureOpenAI.ChatModelId)
            .AddOpenAIChatCompletion(
                modelId: TestConfiguration.OpenAI.ChatModelId,
                apiKey: TestConfiguration.OpenAI.ApiKey,
                serviceId: "OpenAIChat")
            .Build();
    }

    /// <summary>
    /// Shows how to invoke a prompt and specify the service id of the preferred AI service.
    /// When the prompt is executed, the AI service with the matching service id will be selected.
    /// </summary>
    /// <param name="serviceId">Service Id</param>
    [Theory]
    [InlineData("AzureOpenAIChat")]
    public async Task InvokePromptByServiceIdAsync(string serviceId)
    {
        var kernel = BuildKernel();
        Console.WriteLine($"======== Service Id: {serviceId} ========");

        var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ServiceId = serviceId }));

        Console.WriteLine(result.GetValue<string>());
    }

    /// <summary>
    /// Shows how to invoke a prompt and specify the model id of the preferred AI service.
    /// When the prompt is executed, the AI service with the matching model id will be selected.
    /// </summary>
    [Fact]
    public async Task InvokePromptByModelIdAsync()
    {
        var modelId = TestConfiguration.OpenAI.ChatModelId;
        var kernel = BuildKernel();
        Console.WriteLine($"======== Model Id: {modelId} ========");

        var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ModelId = modelId }));

        Console.WriteLine(result.GetValue<string>());
    }

    /// <summary>
    /// Shows how to invoke a prompt and specify the service ids of the preferred AI services.
    /// When the prompt is executed, the AI service will be selected based on the order of the provided service ids.
    /// </summary>
    [Fact]
    public async Task InvokePromptFunctionWithFirstMatchingServiceIdAsync()
    {
        string[] serviceIds = ["NotFound", "AzureOpenAIChat", "OpenAIChat"];
        var kernel = BuildKernel();
        Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========");

        var result = await kernel.InvokePromptAsync(ChatPrompt, new(serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId })));

        Console.WriteLine(result.GetValue<string>());
    }

    /// <summary>
    /// Shows how to invoke a prompt and specify the model ids of the preferred AI services.
    /// When the prompt is executed, the AI service will be selected based on the order of the provided model ids.
    /// </summary>
    [Fact]
    public async Task InvokePromptFunctionWithFirstMatchingModelIdAsync()
    {
        string[] modelIds = ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId];
        var kernel = BuildKernel();
        Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========");

        var result = await kernel.InvokePromptAsync(ChatPrompt, new(modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId })));

        Console.WriteLine(result.GetValue<string>());
    }

    /// <summary>
    /// Shows how to create a KernelFunction from a prompt and specify the service ids of the preferred AI services.
    /// When the function is invoked, the AI service will be selected based on the order of the provided service ids.
    /// </summary>
    [Fact]
    public async Task InvokePreconfiguredFunctionWithFirstMatchingServiceIdAsync()
    {
        string[] serviceIds = ["NotFound", "AzureOpenAIChat", "OpenAIChat"];
        var kernel = BuildKernel();
        Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========");

        var function = kernel.CreateFunctionFromPrompt(ChatPrompt, serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId }));
        var result = await kernel.InvokeAsync(function);

        Console.WriteLine(result.GetValue<string>());
    }

    /// <summary>
    /// Shows how to create a KernelFunction from a prompt and specify the model ids of the preferred AI services.
    /// When the function is invoked, the AI service will be selected based on the order of the provided model ids.
    /// </summary>
    [Fact]
    public async Task InvokePreconfiguredFunctionWithFirstMatchingModelIdAsync()
    {
        string[] modelIds = ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId];
        var kernel = BuildKernel();

        Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========");

        var function = kernel.CreateFunctionFromPrompt(ChatPrompt, modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId }));
        var result = await kernel.InvokeAsync(function);

        Console.WriteLine(result.GetValue<string>());
    }

    /// <summary>
    /// Shows how to invoke a KernelFunction and specify the model id of the AI service the function will use.
    /// </summary>
    [Fact]
    public async Task InvokePreconfiguredFunctionByModelIdAsync()
    {
        var modelId = TestConfiguration.OpenAI.ChatModelId;
        var kernel = BuildKernel();
        Console.WriteLine($"======== Model Id: {modelId} ========");

        var function = kernel.CreateFunctionFromPrompt(ChatPrompt);
        var result = await kernel.InvokeAsync(function, new(new PromptExecutionSettings { ModelId = modelId }));

        Console.WriteLine(result.GetValue<string>());
    }

    /// <summary>
    /// Shows how to invoke a KernelFunction and specify the service id of the AI service the function will use.
    /// </summary>
    /// <param name="serviceId">Service Id</param>
    [Theory]
    [InlineData("AzureOpenAIChat")]
    public async Task InvokePreconfiguredFunctionByServiceIdAsync(string serviceId)
    {
        var kernel = BuildKernel();
        Console.WriteLine($"======== Service Id: {serviceId} ========");

        var function = kernel.CreateFunctionFromPrompt(ChatPrompt);
        var result = await kernel.InvokeAsync(function, new(new PromptExecutionSettings { ServiceId = serviceId }));

        Console.WriteLine(result.GetValue<string>());
    }

    /// <summary>
    /// Shows that the kernel throws an exception when a non-existent service id is specified.
    /// </summary>
    /// <param name="serviceId">Service Id</param>
    [Theory]
    [InlineData("NotFound")]
    public async Task InvokePromptByNonExistingServiceIdThrowsExceptionAsync(string serviceId)
    {
        var kernel = BuildKernel();
        Console.WriteLine($"======== Service Id: {serviceId} ========");

        await Assert.ThrowsAsync<KernelException>(async () => await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ServiceId = serviceId })));
    }

    /// <summary>
    /// Shows that when the model id specified in the execution settings is not found, execution falls back to the default service.
    /// </summary>
    /// <param name="modelId">Model Id</param>
    [Theory]
    [InlineData("NotFound")]
    public async Task InvokePromptByNonExistingModelIdUsesDefaultServiceAsync(string modelId)
    {
        var kernel = BuildKernel();
        Console.WriteLine($"======== Model Id: {modelId} ========");

        await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ModelId = modelId }));
    }
}
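A detail worth calling out in the model-id variants above: each settings instance gets a synthetic, unique ServiceId ("service-0", "service-1", ...). Settings that only set ModelId would otherwise all resolve to the default service id, and the KernelArguments constructor shown further down in this diff rejects duplicate keys. Condensed, the pattern is:

// Unique synthetic service ids keep several model-only settings from colliding on "default".
var settings = modelIds.Select((modelId, index) =>
    new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId });
var result = await kernel.InvokePromptAsync(ChatPrompt, new(settings));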
@@ -73,6 +73,6 @@ public async Task<FunctionResult> ExecuteFlowAsync(
        }

        var executor = new FlowExecutor(this._kernelBuilder, this._flowStatusProvider, this._globalPluginCollection, this._config);
        return await executor.ExecuteFlowAsync(flow, sessionId, input, kernelArguments ?? new KernelArguments()).ConfigureAwait(false);
    }
}
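This one-line change (previously `new KernelArguments(null)`) is likely forced by the new constructor overloads introduced below: once KernelArguments exposes both KernelArguments(PromptExecutionSettings?) and KernelArguments(IEnumerable<PromptExecutionSettings>?), a bare null argument no longer compiles because overload resolution is ambiguous. Hypothetical call sites for illustration:

// var args = new KernelArguments(null);                           // CS0121: ambiguous call
var viaDefault = new KernelArguments();                             // parameterless constructor
var viaCast = new KernelArguments((PromptExecutionSettings?)null);  // explicit type resolves the ambiguity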
@@ -3,6 +3,7 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics.CodeAnalysis;
using System.Text.Json.Serialization;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.TextGeneration;

@@ -27,6 +28,27 @@ public class PromptExecutionSettings
    /// </remarks>
    public static string DefaultServiceId => "default";

    /// <summary>
    /// Service identifier.
    /// This identifies the service these settings are configured for, e.g., azure_openai_eastus, openai, ollama, huggingface, etc.
    /// </summary>
    /// <remarks>
    /// When provided, this service identifier will be the key in a dictionary collection of execution settings for both <see cref="KernelArguments"/> and <see cref="PromptTemplateConfig"/>.
    /// If not provided, the service identifier will be the default value in <see cref="DefaultServiceId"/>.
    /// </remarks>
    [Experimental("SKEXP0001")]
    [JsonPropertyName("service_id")]
    public string? ServiceId
    {
        get => this._serviceId;

        set
        {
            this.ThrowIfFrozen();
            this._serviceId = value;
        }
    }

    /// <summary>
    /// Model identifier.
    /// This identifies the AI model these settings are configured for, e.g., gpt-4, gpt-3.5-turbo

@@ -93,6 +115,7 @@ public virtual PromptExecutionSettings Clone()
        return new()
        {
            ModelId = this.ModelId,
            ServiceId = this.ServiceId,
            ExtensionData = this.ExtensionData is not null ? new Dictionary<string, object>(this.ExtensionData) : null
        };
    }

@@ -113,6 +136,7 @@ protected void ThrowIfFrozen()
    private string? _modelId;
    private IDictionary<string, object>? _extensionData;
    private string? _serviceId;

    #endregion
}
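The new property slots into the existing keyed-settings model: when a collection of settings is passed to KernelArguments (see the constructor below), each instance's ServiceId becomes its dictionary key. A small sketch reusing the service ids registered in the sample above (ServiceId is experimental, so consuming code may need to suppress SKEXP0001):

// Order expresses preference; the first id matching a registered service is used.
var arguments = new KernelArguments(new List<PromptExecutionSettings>
{
    new() { ServiceId = "AzureOpenAIChat" }, // preferred service
    new() { ServiceId = "OpenAIChat" },      // fallback service
});
// arguments.ExecutionSettings is now keyed by "AzureOpenAIChat" and "OpenAIChat".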
@@ -21,6 +21,7 @@ public sealed class KernelArguments : IDictionary<string, object?>, IReadOnlyDic
{
    /// <summary>Dictionary of name/values for all the arguments in the instance.</summary>
    private readonly Dictionary<string, object?> _arguments;
    private IReadOnlyDictionary<string, PromptExecutionSettings>? _executionSettings;

    /// <summary>
    /// Initializes a new instance of the <see cref="KernelArguments"/> class with the specified AI execution settings.

@@ -36,12 +37,36 @@ public KernelArguments()
    /// </summary>
    /// <param name="executionSettings">The prompt execution settings.</param>
    public KernelArguments(PromptExecutionSettings? executionSettings)
        : this(executionSettings is null ? null : [executionSettings])
    {
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="KernelArguments"/> class with the specified AI execution settings.
    /// </summary>
    /// <param name="executionSettings">The prompt execution settings.</param>
    public KernelArguments(IEnumerable<PromptExecutionSettings>? executionSettings)
    {
        this._arguments = new(StringComparer.OrdinalIgnoreCase);
        if (executionSettings is not null)
        {
            var newExecutionSettings = new Dictionary<string, PromptExecutionSettings>();
            foreach (var settings in executionSettings)
            {
                var targetServiceId = settings.ServiceId ?? PromptExecutionSettings.DefaultServiceId;
                if (newExecutionSettings.ContainsKey(targetServiceId))
                {
                    var exceptionMessage = (targetServiceId == PromptExecutionSettings.DefaultServiceId)
                        ? $"Multiple prompt execution settings with the default service id '{PromptExecutionSettings.DefaultServiceId}' or no service id have been provided. Specify a single default prompt execution settings and provide a unique service id for all other instances."
                        : $"Multiple prompt execution settings with the service id '{targetServiceId}' have been provided. Provide a unique service id for all instances.";

                    throw new ArgumentException(exceptionMessage, nameof(executionSettings));
                }

                newExecutionSettings[targetServiceId] = settings;
            }

            this.ExecutionSettings = newExecutionSettings;
        }
    }

@@ -65,7 +90,30 @@ public KernelArguments(IDictionary<string, object?> source, Dictionary<string, P
    /// <summary>
    /// Gets or sets the prompt execution settings.
    /// </summary>
    /// <remarks>
    /// The settings dictionary is keyed by the service ID, or <see cref="PromptExecutionSettings.DefaultServiceId"/> for the default execution settings.
    /// When setting, the service id of each <see cref="PromptExecutionSettings"/> must match the key in the dictionary.
    /// </remarks>
    public IReadOnlyDictionary<string, PromptExecutionSettings>? ExecutionSettings
    {
        get => this._executionSettings;
        set
        {
            if (value is { Count: > 0 })
            {
                foreach (var kv in value!)
                {
                    // Ensure that a service id, when specified, matches its key in the dictionary.
                    if (!string.IsNullOrWhiteSpace(kv.Value.ServiceId) && kv.Key != kv.Value.ServiceId)
                    {
                        throw new ArgumentException($"Service id '{kv.Value.ServiceId}' must match the key '{kv.Key}'.", nameof(this.ExecutionSettings));
                    }
                }
            }

            this._executionSettings = value;
        }
    }

    /// <summary>
    /// Gets the number of arguments contained in the <see cref="KernelArguments"/>.
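Two validation paths fall out of the KernelArguments changes above. A minimal sketch of both, assuming the Microsoft.SemanticKernel package and implicit usings (exception messages abbreviated from the code):

// 1) Constructor: two settings without a ServiceId both resolve to "default" and throw.
try
{
    var duplicates = new KernelArguments([new PromptExecutionSettings(), new PromptExecutionSettings()]);
}
catch (ArgumentException ex)
{
    Console.WriteLine(ex.Message); // "Multiple prompt execution settings with the default service id 'default' ..."
}

// 2) Setter: a dictionary key that disagrees with the settings' own ServiceId throws.
try
{
    var mismatched = new KernelArguments
    {
        ExecutionSettings = new Dictionary<string, PromptExecutionSettings>
        {
            ["OpenAIChat"] = new() { ServiceId = "AzureOpenAIChat" }
        }
    };
}
catch (ArgumentException ex)
{
    Console.WriteLine(ex.Message); // "Service id 'AzureOpenAIChat' must match the key 'OpenAIChat'."
}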