From 7396a9fb68707fc2ce3613fc502d19a1de0a1e65 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Wed, 31 Jul 2024 14:58:08 +0100
Subject: [PATCH 1/5] Router initial steps
---
dotnet/SK-dotnet.sln | 11 ++-
.../Demos/AIModelRouter/AIModelRouter.csproj | 20 +++++
dotnet/samples/Demos/AIModelRouter/Program.cs | 76 +++++++++++++++++++
3 files changed, 106 insertions(+), 1 deletion(-)
create mode 100644 dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj
create mode 100644 dotnet/samples/Demos/AIModelRouter/Program.cs
diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln
index 6574700e6ce6..b6cd87d2040b 100644
--- a/dotnet/SK-dotnet.sln
+++ b/dotnet/SK-dotnet.sln
@@ -318,7 +318,9 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Redis.UnitTests"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Connectors.Qdrant.UnitTests", "src\Connectors\Connectors.Qdrant.UnitTests\Connectors.Qdrant.UnitTests.csproj", "{E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF}"
EndProject
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StepwisePlannerMigration", "samples\Demos\StepwisePlannerMigration\StepwisePlannerMigration.csproj", "{38374C62-0263-4FE8-A18C-70FC8132912B}"
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "StepwisePlannerMigration", "samples\Demos\StepwisePlannerMigration\StepwisePlannerMigration.csproj", "{38374C62-0263-4FE8-A18C-70FC8132912B}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AIModelRouter", "samples\Demos\AIModelRouter\AIModelRouter.csproj", "{E06818E3-00A5-41AC-97ED-9491070CDEA1}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -795,6 +797,12 @@ Global
{38374C62-0263-4FE8-A18C-70FC8132912B}.Publish|Any CPU.Build.0 = Debug|Any CPU
{38374C62-0263-4FE8-A18C-70FC8132912B}.Release|Any CPU.ActiveCfg = Release|Any CPU
{38374C62-0263-4FE8-A18C-70FC8132912B}.Release|Any CPU.Build.0 = Release|Any CPU
+ {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
+ {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Publish|Any CPU.Build.0 = Debug|Any CPU
+ {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {E06818E3-00A5-41AC-97ED-9491070CDEA1}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -904,6 +912,7 @@ Global
{1D4667B9-9381-4E32-895F-123B94253EE8} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C}
{E92AE954-8F3A-4A6F-A4F9-DC12017E5AAF} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C}
{38374C62-0263-4FE8-A18C-70FC8132912B} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263}
+ {E06818E3-00A5-41AC-97ED-9491070CDEA1} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83}
diff --git a/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj b/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj
new file mode 100644
index 000000000000..7acfc3bf9986
--- /dev/null
+++ b/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj
@@ -0,0 +1,20 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <OutputType>Exe</OutputType>
+    <TargetFrameworks>net8.0;netstandard2.0</TargetFrameworks>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+    <UserSecretsId>c478d0b2-7145-4d1a-9600-3130c04085cd</UserSecretsId>
+  </PropertyGroup>
+
+
+
+
+
+
+
+
+
+
+</Project>
diff --git a/dotnet/samples/Demos/AIModelRouter/Program.cs b/dotnet/samples/Demos/AIModelRouter/Program.cs
new file mode 100644
index 000000000000..c12c6fed6bd2
--- /dev/null
+++ b/dotnet/samples/Demos/AIModelRouter/Program.cs
@@ -0,0 +1,76 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+
+#pragma warning disable SKEXP0001
+#pragma warning disable SKEXP0010
+
+namespace AIModelRouter;
+
+internal class Program
+{
+ private static async Task Main(string[] args)
+ {
+ var config = new ConfigurationBuilder().AddUserSecrets<Program>().Build();
+
+ var services = new ServiceCollection();
+ var builder = services.AddKernel()
+ .AddOpenAIChatCompletion(serviceId: "lmstudio", modelId: "phi3", endpoint: new Uri("http://localhost:1234"), apiKey: null)
+ .AddOpenAIChatCompletion(serviceId: "ollama", modelId: "phi3", endpoint: new Uri("http://localhost:11434"), apiKey: null)
+ .AddOpenAIChatCompletion(serviceId: "openai", modelId: "gpt-4o", apiKey: config["OpenAI:ApiKey"]!);
+
+ builder.Services.AddSingleton();
+
+ var kernel = services.BuildServiceProvider().GetRequiredService<Kernel>();
+
+ //kernel.PromptRenderFilters.Add(new PromptRenderingFilter());
+ while (true)
+ {
+ Console.Write("\n\nEnter your message to AI: ");
+ var userMessage = Console.ReadLine();
+
+ if (string.IsNullOrWhiteSpace(userMessage)) { return; }
+ var arguments = new KernelArguments()
+ {
+ ExecutionSettings = new Dictionary<string, PromptExecutionSettings>() { { new Router().FindService(userMessage),
+ new OpenAIPromptExecutionSettings() { ResponseFormat = "json" } } }
+ };
+
+ Console.Write("\n\nAI response: ");
+
+ await foreach (var chatChunk in kernel.InvokePromptStreamingAsync(userMessage, arguments).ConfigureAwait(false))
+ {
+ Console.Write(chatChunk);
+ }
+ }
+ }
+
+ public class Router()
+ {
+ public string FindService(string prompt)
+ {
+ if (Contains(prompt, "ollama")) { return "ollama"; }
+ else if (Contains(prompt, "openai")) { return "openai"; }
+ return "lmstudio";
+ }
+
+ private static bool Contains(string prompt, string pattern)
+ {
+#pragma warning disable CA2249 // Consider using 'string.Contains' instead of 'string.IndexOf'
+ return prompt.IndexOf(pattern, StringComparison.CurrentCultureIgnoreCase) >= 0;
+#pragma warning restore CA2249 // Consider using 'string.Contains' instead of 'string.IndexOf'
+ }
+ }
+
+ public class PromptRenderingFilter : IPromptRenderFilter
+ {
+ public Task OnPromptRenderAsync(PromptRenderContext context, Func<PromptRenderContext, Task> next)
+ {
+ Console.WriteLine($"Rendering prompt for service '{context.Arguments.ExecutionSettings?.FirstOrDefault().Key}'");
+ return next(context);
+ }
+ }
+}
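
Note on the mechanism in this first patch: the router keys the `ExecutionSettings` dictionary of `KernelArguments` with a service id, and the kernel resolves the chat completion connector registered under that id. A minimal sketch of that pattern, assuming a `Kernel` built as in `Program.cs` above; the `RouteTo` helper is illustrative and not part of the sample:

```csharp
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.OpenAI;

// Illustrative helper (assumption, not in the sample): build KernelArguments that target
// a single service id. The dictionary key selects which registered chat service is used.
static KernelArguments RouteTo(string serviceId) => new()
{
    ExecutionSettings = new Dictionary<string, PromptExecutionSettings>
    {
        [serviceId] = new OpenAIPromptExecutionSettings()
    }
};

// Usage, assuming 'kernel' has services registered under ids such as "ollama":
// await foreach (var chunk in kernel.InvokePromptStreamingAsync("Hello", RouteTo("ollama")))
// {
//     Console.Write(chunk);
// }
```
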
From a72a478f12cf463cc67aca4087f85d9eab00ecd0 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Thu, 1 Aug 2024 12:52:43 +0100
Subject: [PATCH 2/5] Router demo updated
---
.../Demos/AIModelRouter/AIModelRouter.csproj | 2 +-
.../Demos/AIModelRouter/CustomRouter.cs | 41 ++++++++++++
dotnet/samples/Demos/AIModelRouter/Program.cs | 62 +++++++------------
.../AIModelRouter/PromptRenderingFilter.cs | 26 ++++++++
dotnet/samples/Demos/AIModelRouter/README.md | 15 +++++
5 files changed, 104 insertions(+), 42 deletions(-)
create mode 100644 dotnet/samples/Demos/AIModelRouter/CustomRouter.cs
create mode 100644 dotnet/samples/Demos/AIModelRouter/PromptRenderingFilter.cs
create mode 100644 dotnet/samples/Demos/AIModelRouter/README.md
diff --git a/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj b/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj
index 7acfc3bf9986..fb5862e3270a 100644
--- a/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj
+++ b/dotnet/samples/Demos/AIModelRouter/AIModelRouter.csproj
@@ -5,7 +5,7 @@
    <TargetFrameworks>net8.0;netstandard2.0</TargetFrameworks>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
-    <UserSecretsId>c478d0b2-7145-4d1a-9600-3130c04085cd</UserSecretsId>
+    <UserSecretsId>5ee045b0-aea3-4f08-8d31-32d1a6f8fed0</UserSecretsId>
diff --git a/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs b/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs
new file mode 100644
index 000000000000..1c5d081b2122
--- /dev/null
+++ b/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs
@@ -0,0 +1,41 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+#pragma warning disable SKEXP0001
+#pragma warning disable SKEXP0010
+#pragma warning disable CA2249 // Consider using 'string.Contains' instead of 'string.IndexOf'
+
+namespace AIModelRouter;
+
+internal partial class Program
+{
+ /// <summary>
+ /// This class is for demonstration purposes only.
+ /// In a real-world scenario, you would use a more sophisticated routing mechanism, such as another local model for
+ /// deciding which service to use based on the user's input or any other criteria.
+ /// </summary>
+ public class CustomRouter()
+ {
+ /// <summary>
+ /// Returns the best service id to use based on the user's input.
+ /// This demonstration uses a simple logic where your input is checked for specific keywords as a deciding factor,
+ /// if no keyword is found it defaults to the first service in the list.
+ /// </summary>
+ /// <param name="lookupPrompt">User's input prompt</param>
+ /// <param name="serviceIds">List of service ids to choose from in order of importance, defaulting to the first</param>
+ /// <returns>Service id.</returns>
+ public string FindService(string lookupPrompt, IReadOnlyList<string> serviceIds)
+ {
+ // The order matters, if the keyword is not found, the last one is used.
+ foreach (var serviceId in serviceIds)
+ {
+ if (Contains(lookupPrompt, serviceId)) { return serviceId; }
+ }
+
+ return serviceIds[0];
+ }
+
+ // Ensure compatibility with both netstandard2.0 and net8.0 by using IndexOf instead of Contains
+ private static bool Contains(string prompt, string pattern)
+ => prompt.IndexOf(pattern, StringComparison.CurrentCultureIgnoreCase) >= 0;
+ }
+}
diff --git a/dotnet/samples/Demos/AIModelRouter/Program.cs b/dotnet/samples/Demos/AIModelRouter/Program.cs
index c12c6fed6bd2..678ff4cdde0b 100644
--- a/dotnet/samples/Demos/AIModelRouter/Program.cs
+++ b/dotnet/samples/Demos/AIModelRouter/Program.cs
@@ -3,74 +3,54 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
#pragma warning disable SKEXP0001
#pragma warning disable SKEXP0010
+#pragma warning disable CA2249 // Consider using 'string.Contains' instead of 'string.IndexOf'
namespace AIModelRouter;
-internal class Program
+internal partial class Program
{
private static async Task Main(string[] args)
{
+ Console.ForegroundColor = ConsoleColor.White;
+
var config = new ConfigurationBuilder().AddUserSecrets<Program>().Build();
- var services = new ServiceCollection();
- var builder = services.AddKernel()
- .AddOpenAIChatCompletion(serviceId: "lmstudio", modelId: "phi3", endpoint: new Uri("http://localhost:1234"), apiKey: null)
+ ServiceCollection services = new();
+
+ // Adding multiple connectors targeting different providers / models.
+ services.AddKernel() /* LMStudio model is selected in server side. */
+ .AddOpenAIChatCompletion(serviceId: "lmstudio", modelId: "N/A", endpoint: new Uri("http://localhost:1234"), apiKey: null)
.AddOpenAIChatCompletion(serviceId: "ollama", modelId: "phi3", endpoint: new Uri("http://localhost:11434"), apiKey: null)
- .AddOpenAIChatCompletion(serviceId: "openai", modelId: "gpt-4o", apiKey: config["OpenAI:ApiKey"]!);
+ .AddOpenAIChatCompletion(serviceId: "openai", modelId: "gpt-4o", apiKey: config["OpenAI:ApiKey"]!)
- builder.Services.AddSingleton();
+ // Adding a custom filter to capture router selected service id
+ .Services.AddSingleton<IPromptRenderFilter>(new PromptRenderingFilter());
var kernel = services.BuildServiceProvider().GetRequiredService<Kernel>();
+ var router = new CustomRouter();
- //kernel.PromptRenderFilters.Add(new PromptRenderingFilter());
while (true)
{
- Console.Write("\n\nEnter your message to AI: ");
+ Console.Write("\n\nUser message > ");
var userMessage = Console.ReadLine();
+ // Exit application if the user enters an empty message
if (string.IsNullOrWhiteSpace(userMessage)) { return; }
- var arguments = new KernelArguments()
- {
- ExecutionSettings = new Dictionary<string, PromptExecutionSettings>() { { new Router().FindService(userMessage),
- new OpenAIPromptExecutionSettings() { ResponseFormat = "json" } } }
- };
- Console.Write("\n\nAI response: ");
+ // Find the best service to use based on the user's input
+ KernelArguments arguments = new(new PromptExecutionSettings()
+ {
+ ServiceId = router.FindService(userMessage, ["lmstudio", "ollama", "openai"])
+ });
+ // Invoke the prompt and print the response
await foreach (var chatChunk in kernel.InvokePromptStreamingAsync(userMessage, arguments).ConfigureAwait(false))
{
Console.Write(chatChunk);
}
}
}
-
- public class Router()
- {
- public string FindService(string prompt)
- {
- if (Contains(prompt, "ollama")) { return "ollama"; }
- else if (Contains(prompt, "openai")) { return "openai"; }
- return "lmstudio";
- }
-
- private static bool Contains(string prompt, string pattern)
- {
-#pragma warning disable CA2249 // Consider using 'string.Contains' instead of 'string.IndexOf'
- return prompt.IndexOf(pattern, StringComparison.CurrentCultureIgnoreCase) >= 0;
-#pragma warning restore CA2249 // Consider using 'string.Contains' instead of 'string.IndexOf'
- }
- }
-
- public class PromptRenderingFilter : IPromptRenderFilter
- {
- public Task OnPromptRenderAsync(PromptRenderContext context, Func<PromptRenderContext, Task> next)
- {
- Console.WriteLine($"Rendering prompt for service '{context.Arguments.ExecutionSettings?.FirstOrDefault().Key}'");
- return next(context);
- }
- }
}
diff --git a/dotnet/samples/Demos/AIModelRouter/PromptRenderingFilter.cs b/dotnet/samples/Demos/AIModelRouter/PromptRenderingFilter.cs
new file mode 100644
index 000000000000..510fa1326e5a
--- /dev/null
+++ b/dotnet/samples/Demos/AIModelRouter/PromptRenderingFilter.cs
@@ -0,0 +1,26 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.SemanticKernel;
+
+#pragma warning disable SKEXP0001
+#pragma warning disable SKEXP0010
+#pragma warning disable CA2249 // Consider using 'string.Contains' instead of 'string.IndexOf'
+
+namespace AIModelRouter;
+
+internal partial class Program
+{
+ /// <summary>
+ /// Using a filter to log the service being used for the prompt.
+ /// </summary>
+ public class PromptRenderingFilter : IPromptRenderFilter
+ {
+ public Task OnPromptRenderAsync(PromptRenderContext context, Func<PromptRenderContext, Task> next)
+ {
+ Console.ForegroundColor = ConsoleColor.Yellow;
+ Console.WriteLine($"Selected service id: '{context.Arguments.ExecutionSettings?.FirstOrDefault().Key}'");
+ Console.ForegroundColor = ConsoleColor.White;
+ return next(context);
+ }
+ }
+}
diff --git a/dotnet/samples/Demos/AIModelRouter/README.md b/dotnet/samples/Demos/AIModelRouter/README.md
new file mode 100644
index 000000000000..8c808d5fe1af
--- /dev/null
+++ b/dotnet/samples/Demos/AIModelRouter/README.md
@@ -0,0 +1,15 @@
+# AI Model Router
+
+This sample demonstrates how to use the AI Model Router to route requests to different AI models based on the user's input.
+
+This sample uses LMStudio, Ollama and OpenAI as the AI models and the OpenAI Connector because LMStudio and Ollama provide OpenAI API compatibility.
+
+> [!IMPORTANT]
+> You can modify this sample to use any other combination of connectors or OpenAI-compatible model providers.
+
+## Semantic Kernel Features Used
+
+- [Chat Completion Service](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) - Using the Chat Completion Service [OpenAI Connector implementation](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs) to generate responses from the LLM.
+- [Filters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) - Using the Chat Completion Service [OpenAI Connector implementation](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs) to generate responses from the LLM.
+
+
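
To make the routing flow added in this patch concrete, here is the core pattern condensed from `Program.cs` above: `CustomRouter.FindService` picks a service id from keywords in the prompt, and `PromptExecutionSettings.ServiceId` tells the kernel which registered connector should handle the request (service ids as registered above, error handling omitted):

```csharp
var router = new CustomRouter();

// Pick a service id based on keywords in the user's message; defaults to the first id in the list.
var serviceId = router.FindService(userMessage, ["lmstudio", "ollama", "openai"]);

// ServiceId on the execution settings selects which registered connector handles the prompt.
var arguments = new KernelArguments(new PromptExecutionSettings { ServiceId = serviceId });

await foreach (var chunk in kernel.InvokePromptStreamingAsync(userMessage, arguments))
{
    Console.Write(chunk);
}
```
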
From 01e3facd116cce58b4a290e46ebffaca2c86e020 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Fri, 2 Aug 2024 14:15:12 +0100
Subject: [PATCH 3/5] SK Router Demo
---
.../Demos/AIModelRouter/CustomRouter.cs | 45 +++++++++----------
dotnet/samples/Demos/AIModelRouter/Program.cs | 4 +-
.../AIModelRouter/PromptRenderingFilter.cs | 26 -----------
dotnet/samples/Demos/AIModelRouter/README.md | 44 ++++++++++++++++--
.../AIModelRouter/SelectedServiceFilter.cs | 27 +++++++++++
5 files changed, 90 insertions(+), 56 deletions(-)
delete mode 100644 dotnet/samples/Demos/AIModelRouter/PromptRenderingFilter.cs
create mode 100644 dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs
diff --git a/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs b/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs
index 1c5d081b2122..1c9941c9c6ce 100644
--- a/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs
+++ b/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs
@@ -6,36 +6,33 @@
namespace AIModelRouter;
-internal partial class Program
+/// <summary>
+/// This class is for demonstration purposes only.
+/// In a real-world scenario, you would use a more sophisticated routing mechanism, such as another local model for
+/// deciding which service to use based on the user's input or any other criteria.
+/// </summary>
+public class CustomRouter()
{
/// <summary>
- /// This class is for demonstration purposes only.
- /// In a real-world scenario, you would use a more sophisticated routing mechanism, such as another local model for
- /// deciding which service to use based on the user's input or any other criteria.
+ /// Returns the best service id to use based on the user's input.
+ /// This demonstration uses a simple logic where your input is checked for specific keywords as a deciding factor,
+ /// if no keyword is found it defaults to the first service in the list.
/// </summary>
- public class CustomRouter()
+ /// <param name="lookupPrompt">User's input prompt</param>
+ /// <param name="serviceIds">List of service ids to choose from in order of importance, defaulting to the first</param>
+ /// <returns>Service id.</returns>
+ public string FindService(string lookupPrompt, IReadOnlyList<string> serviceIds)
{
- /// <summary>
- /// Returns the best service id to use based on the user's input.
- /// This demonstration uses a simple logic where your input is checked for specific keywords as a deciding factor,
- /// if no keyword is found it defaults to the first service in the list.
- /// </summary>
- /// <param name="lookupPrompt">User's input prompt</param>
- /// <param name="serviceIds">List of service ids to choose from in order of importance, defaulting to the first</param>
- /// <returns>Service id.</returns>
- public string FindService(string lookupPrompt, IReadOnlyList<string> serviceIds)
+ // The order matters, if the keyword is not found, the last one is used.
+ foreach (var serviceId in serviceIds)
{
- // The order matters, if the keyword is not found, the last one is used.
- foreach (var serviceId in serviceIds)
- {
- if (Contains(lookupPrompt, serviceId)) { return serviceId; }
- }
-
- return serviceIds[0];
+ if (Contains(lookupPrompt, serviceId)) { return serviceId; }
}
- // Ensure compatibility with both netstandard2.0 and net8.0 by using IndexOf instead of Contains
- private static bool Contains(string prompt, string pattern)
- => prompt.IndexOf(pattern, StringComparison.CurrentCultureIgnoreCase) >= 0;
+ return serviceIds[0];
}
+
+ // Ensure compatibility with both netstandard2.0 and net8.0 by using IndexOf instead of Contains
+ private static bool Contains(string prompt, string pattern)
+ => prompt.IndexOf(pattern, StringComparison.CurrentCultureIgnoreCase) >= 0;
}
diff --git a/dotnet/samples/Demos/AIModelRouter/Program.cs b/dotnet/samples/Demos/AIModelRouter/Program.cs
index 678ff4cdde0b..49368a44cd26 100644
--- a/dotnet/samples/Demos/AIModelRouter/Program.cs
+++ b/dotnet/samples/Demos/AIModelRouter/Program.cs
@@ -27,14 +27,14 @@ private static async Task Main(string[] args)
.AddOpenAIChatCompletion(serviceId: "openai", modelId: "gpt-4o", apiKey: config["OpenAI:ApiKey"]!)
// Adding a custom filter to capture router selected service id
- .Services.AddSingleton<IPromptRenderFilter>(new PromptRenderingFilter());
+ .Services.AddSingleton<IPromptRenderFilter>(new SelectedServiceFilter());
var kernel = services.BuildServiceProvider().GetRequiredService();
var router = new CustomRouter();
while (true)
{
- Console.Write("\n\nUser message > ");
+ Console.Write("\n\nUser > ");
var userMessage = Console.ReadLine();
// Exit application if the user enters an empty message
diff --git a/dotnet/samples/Demos/AIModelRouter/PromptRenderingFilter.cs b/dotnet/samples/Demos/AIModelRouter/PromptRenderingFilter.cs
deleted file mode 100644
index 510fa1326e5a..000000000000
--- a/dotnet/samples/Demos/AIModelRouter/PromptRenderingFilter.cs
+++ /dev/null
@@ -1,26 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using Microsoft.SemanticKernel;
-
-#pragma warning disable SKEXP0001
-#pragma warning disable SKEXP0010
-#pragma warning disable CA2249 // Consider using 'string.Contains' instead of 'string.IndexOf'
-
-namespace AIModelRouter;
-
-internal partial class Program
-{
- /// <summary>
- /// Using a filter to log the service being used for the prompt.
- /// </summary>
- public class PromptRenderingFilter : IPromptRenderFilter
- {
- public Task OnPromptRenderAsync(PromptRenderContext context, Func<PromptRenderContext, Task> next)
- {
- Console.ForegroundColor = ConsoleColor.Yellow;
- Console.WriteLine($"Selected service id: '{context.Arguments.ExecutionSettings?.FirstOrDefault().Key}'");
- Console.ForegroundColor = ConsoleColor.White;
- return next(context);
- }
- }
-}
diff --git a/dotnet/samples/Demos/AIModelRouter/README.md b/dotnet/samples/Demos/AIModelRouter/README.md
index 8c808d5fe1af..92ac37e7c81e 100644
--- a/dotnet/samples/Demos/AIModelRouter/README.md
+++ b/dotnet/samples/Demos/AIModelRouter/README.md
@@ -1,8 +1,6 @@
# AI Model Router
-This sample demonstrates how to use the AI Model Router to route requests to different AI models based on the user's input.
-
-This sample uses LMStudio, Ollama and OpenAI as the AI models and the OpenAI Connector because LMStudio and Ollama provide OpenAI API compatibility.
+This sample demonstrates how to implement an AI Model Router using Semantic Kernel connectors to direct requests to various AI models based on user input. As part of this example we integrate LMStudio, Ollama, and OpenAI, utilizing the OpenAI Connector for LMStudio and Ollama due to their compatibility with the OpenAI API.
> [!IMPORTANT]
> You can modify this sample to use any other combination of connectors or OpenAI-compatible model providers.
@@ -10,6 +8,44 @@ This sample uses LMStudio, Ollama and OpenAI as the AI models and the OpenAI Con
## Semantic Kernel Features Used
- [Chat Completion Service](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) - Using the Chat Completion Service [OpenAI Connector implementation](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs) to generate responses from the LLM.
-- [Filters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs) - Using the Chat Completion Service [OpenAI Connector implementation](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/Connectors/Connectors.OpenAI/ChatCompletion/OpenAIChatCompletionService.cs) to generate responses from the LLM.
+- [Filters](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/IChatCompletionService.cs), used to capture the selected service id and log it to the console.
+
+## Prerequisites
+
+- [.NET 8](https://dotnet.microsoft.com/download/dotnet/8.0).
+
+## Configuring the sample
+
+The sample can be configured by using the command line with .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets) to avoid the risk of leaking secrets into the repository, branches and pull requests.
+
+### Using .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets)
+
+```powershell
+# OpenAI (Not required if using Azure OpenAI)
+dotnet user-secrets set "OpenAI:ApiKey" "... your api key ... "
+```
+
+## Running the sample
+
+After configuring the sample, to build and run the console application just hit `F5`.
+
+To build and run the console application from the terminal use the following commands:
+
+```powershell
+dotnet build
+dotnet run
+```
+
+### Example of a conversation
+
+> **User** > OpenAI, what is Jupiter? Keep it simple.
+
+> **Assistant** > Sure! Jupiter is the largest planet in our solar system. It's a gas giant, mostly made of hydrogen and helium, and it has a lot of storms, including the famous Great Red Spot. Jupiter also has at least 79 moons.
+
+> **User** > Ollama, what is Jupiter? Keep it simple.
+
+> **Assistant** > Jupiter is a giant planet in our solar system known for being the largest and most massive, famous for its spectacled clouds and dozens of moons including Ganymede which is bigger than Earth!
+
+> **User** > LMStudio, what is Jupiter? Keep it simple.
+
+> **Assistant** > Jupiter is the fifth planet from the Sun in our Solar System and one of its gas giants alongside Saturn, Uranus, and Neptune. It's famous for having a massive storm called the Great Red Spot that has been raging for hundreds of years.
\ No newline at end of file
diff --git a/dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs b/dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs
new file mode 100644
index 000000000000..8d962e4d878f
--- /dev/null
+++ b/dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs
@@ -0,0 +1,27 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using Microsoft.SemanticKernel;
+
+#pragma warning disable SKEXP0001
+#pragma warning disable SKEXP0010
+#pragma warning disable CA2249 // Consider using 'string.Contains' instead of 'string.IndexOf'
+
+namespace AIModelRouter;
+
+/// <summary>
+/// Using a filter to log the service being used for the prompt.
+/// </summary>
+public class SelectedServiceFilter : IPromptRenderFilter
+{
+ /// <inheritdoc/>
+ public Task OnPromptRenderAsync(PromptRenderContext context, Func<PromptRenderContext, Task> next)
+ {
+ Console.ForegroundColor = ConsoleColor.Yellow;
+ Console.WriteLine($"Selected service id: '{context.Arguments.ExecutionSettings?.FirstOrDefault().Key}'");
+
+ Console.ForegroundColor = ConsoleColor.White;
+ Console.Write("Assistant > ");
+ return next(context);
+ }
+}
From 78a58fc35bc584b1237301e2a2799786710c014d Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Fri, 2 Aug 2024 14:26:53 +0100
Subject: [PATCH 4/5] Fix warnings
---
dotnet/samples/Demos/AIModelRouter/Program.cs | 2 +-
dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs | 1 -
2 files changed, 1 insertion(+), 2 deletions(-)
diff --git a/dotnet/samples/Demos/AIModelRouter/Program.cs b/dotnet/samples/Demos/AIModelRouter/Program.cs
index 49368a44cd26..5bafa4934883 100644
--- a/dotnet/samples/Demos/AIModelRouter/Program.cs
+++ b/dotnet/samples/Demos/AIModelRouter/Program.cs
@@ -10,7 +10,7 @@
namespace AIModelRouter;
-internal partial class Program
+internal sealed partial class Program
{
private static async Task Main(string[] args)
{
diff --git a/dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs b/dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs
index 8d962e4d878f..9824d57ebd55 100644
--- a/dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs
+++ b/dotnet/samples/Demos/AIModelRouter/SelectedServiceFilter.cs
@@ -1,6 +1,5 @@
// Copyright (c) Microsoft. All rights reserved.
-using System;
using Microsoft.SemanticKernel;
#pragma warning disable SKEXP0001
From f6f9607bbfb3fed5473ab9dfaa1d3b61498fb81d Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Tue, 6 Aug 2024 15:05:40 +0100
Subject: [PATCH 5/5] Update dotnet/samples/Demos/AIModelRouter/CustomRouter.cs
Co-authored-by: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com>
---
dotnet/samples/Demos/AIModelRouter/CustomRouter.cs | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs b/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs
index 1c9941c9c6ce..ff2767a289c8 100644
--- a/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs
+++ b/dotnet/samples/Demos/AIModelRouter/CustomRouter.cs
@@ -23,7 +23,7 @@ public class CustomRouter()
/// Service id.
public string FindService(string lookupPrompt, IReadOnlyList<string> serviceIds)
{
- // The order matters, if the keyword is not found, the last one is used.
+ // The order matters, if the keyword is not found, the first one is used.
foreach (var serviceId in serviceIds)
{
if (Contains(lookupPrompt, serviceId)) { return serviceId; }
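
With the corrected comment above, the behaviour of `CustomRouter.FindService` across this series is: scan the prompt for each registered service id in order and return the first id that appears, falling back to the first id in the list when none is mentioned. A small usage sketch with the ids registered in `Program.cs`:

```csharp
var router = new CustomRouter();
string[] ids = ["lmstudio", "ollama", "openai"];

// A prompt that names a service id routes to that service...
Console.WriteLine(router.FindService("Ollama, what is Jupiter? Keep it simple.", ids)); // "ollama"

// ...and a prompt that names none falls back to the first id in the list.
Console.WriteLine(router.FindService("What is Jupiter? Keep it simple.", ids));         // "lmstudio"
```
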