diff --git a/.github/_typos.toml b/.github/_typos.toml
index 917745e1ae83..08b4ab37f906 100644
--- a/.github/_typos.toml
+++ b/.github/_typos.toml
@@ -16,6 +16,7 @@ extend-exclude = [
"test_code_tokenizer.py",
"*response.json",
"test_content.txt",
+ "serializedChatHistoryV1_15_1.json"
]
[default.extend-words]
diff --git a/.github/workflows/dotnet-build-and-test.yml b/.github/workflows/dotnet-build-and-test.yml
index 366934c73314..dd83478b508b 100644
--- a/.github/workflows/dotnet-build-and-test.yml
+++ b/.github/workflows/dotnet-build-and-test.yml
@@ -96,6 +96,7 @@ jobs:
AzureOpenAI__Label: azure-text-davinci-003
AzureOpenAIEmbedding__Label: azure-text-embedding-ada-002
AzureOpenAI__DeploymentName: ${{ vars.AZUREOPENAI__DEPLOYMENTNAME }}
+ AzureOpenAI__ChatDeploymentName: ${{ vars.AZUREOPENAI__CHATDEPLOYMENTNAME }}
AzureOpenAIEmbeddings__DeploymentName: ${{ vars.AZUREOPENAIEMBEDDING__DEPLOYMENTNAME }}
AzureOpenAI__Endpoint: ${{ secrets.AZUREOPENAI__ENDPOINT }}
AzureOpenAIEmbeddings__Endpoint: ${{ secrets.AZUREOPENAI_EASTUS__ENDPOINT }}
@@ -110,14 +111,20 @@ jobs:
OpenAITextToAudio__ModelId: ${{ vars.OPENAITEXTTOAUDIO__MODELID }}
OpenAIAudioToText__ApiKey: ${{ secrets.OPENAIAUDIOTOTEXT__APIKEY }}
OpenAIAudioToText__ModelId: ${{ vars.OPENAIAUDIOTOTEXT__MODELID }}
+ OpenAITextToImage__ApiKey: ${{ secrets.OPENAITEXTTOIMAGE__APIKEY }}
+ OpenAITextToImage__ModelId: ${{ vars.OPENAITEXTTOIMAGE__MODELID }}
AzureOpenAITextToAudio__ApiKey: ${{ secrets.AZUREOPENAITEXTTOAUDIO__APIKEY }}
AzureOpenAITextToAudio__Endpoint: ${{ secrets.AZUREOPENAITEXTTOAUDIO__ENDPOINT }}
AzureOpenAITextToAudio__DeploymentName: ${{ vars.AZUREOPENAITEXTTOAUDIO__DEPLOYMENTNAME }}
AzureOpenAIAudioToText__ApiKey: ${{ secrets.AZUREOPENAIAUDIOTOTEXT__APIKEY }}
AzureOpenAIAudioToText__Endpoint: ${{ secrets.AZUREOPENAIAUDIOTOTEXT__ENDPOINT }}
AzureOpenAIAudioToText__DeploymentName: ${{ vars.AZUREOPENAIAUDIOTOTEXT__DEPLOYMENTNAME }}
+ AzureOpenAITextToImage__ApiKey: ${{ secrets.AZUREOPENAITEXTTOIMAGE__APIKEY }}
+ AzureOpenAITextToImage__Endpoint: ${{ secrets.AZUREOPENAITEXTTOIMAGE__ENDPOINT }}
+ AzureOpenAITextToImage__DeploymentName: ${{ vars.AZUREOPENAITEXTTOIMAGE__DEPLOYMENTNAME }}
Bing__ApiKey: ${{ secrets.BING__APIKEY }}
OpenAI__ApiKey: ${{ secrets.OPENAI__APIKEY }}
+ OpenAI__ChatModelId: ${{ vars.OPENAI__CHATMODELID }}
# Generate test reports and check coverage
- name: Generate test reports
@@ -126,7 +133,7 @@ jobs:
reports: "./TestResults/Coverage/**/coverage.cobertura.xml"
targetdir: "./TestResults/Reports"
reporttypes: "JsonSummary"
- assemblyfilters: "+Microsoft.SemanticKernel.Abstractions;+Microsoft.SemanticKernel.Core;+Microsoft.SemanticKernel.PromptTemplates.Handlebars;+Microsoft.SemanticKernel.Connectors.OpenAI;+Microsoft.SemanticKernel.Yaml;+Microsoft.SemanticKernel.Agents.Abstractions;+Microsoft.SemanticKernel.Agents.Core;+Microsoft.SemanticKernel.Agents.OpenAI"
+ assemblyfilters: "+Microsoft.SemanticKernel.Abstractions;+Microsoft.SemanticKernel.Core;+Microsoft.SemanticKernel.PromptTemplates.Handlebars;+Microsoft.SemanticKernel.Connectors.OpenAI;+Microsoft.SemanticKernel.Connectors.AzureOpenAI;+Microsoft.SemanticKernel.Yaml;+Microsoft.SemanticKernel.Agents.Abstractions;+Microsoft.SemanticKernel.Agents.Core;+Microsoft.SemanticKernel.Agents.OpenAI"
- name: Check coverage
shell: pwsh
diff --git a/dotnet/Directory.Build.props b/dotnet/Directory.Build.props
index 751afab85104..94d748c78057 100644
--- a/dotnet/Directory.Build.props
+++ b/dotnet/Directory.Build.props
@@ -11,6 +11,11 @@
disable
+
+
+ false
+
+
disable
@@ -30,4 +35,4 @@
<_Parameter1>false
-
\ No newline at end of file
+
diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props
index e2f8f0ce3e89..75d19fe11d0b 100644
--- a/dotnet/Directory.Packages.props
+++ b/dotnet/Directory.Packages.props
@@ -5,9 +5,10 @@
true
+
+
-
-
+
diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln
index a3bc4f9a380d..cb04656ffb01 100644
--- a/dotnet/SK-dotnet.sln
+++ b/dotnet/SK-dotnet.sln
@@ -326,6 +326,12 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AIModelRouter", "samples\De
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.AzureCosmosDBNoSQL.UnitTests", "src\Connectors\Connectors.AzureCosmosDBNoSQL.UnitTests\Connectors.AzureCosmosDBNoSQL.UnitTests.csproj", "{385A8FE5-87E2-4458-AE09-35E10BD2E67F}"
EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.OpenAI.UnitTests", "src\Connectors\Connectors.OpenAI.UnitTests\Connectors.OpenAI.UnitTests.csproj", "{36DDC119-C030-407E-AC51-A877E9E0F660}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.AzureOpenAI", "src\Connectors\Connectors.AzureOpenAI\Connectors.AzureOpenAI.csproj", "{7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Connectors.AzureOpenAI.UnitTests", "src\Connectors\Connectors.AzureOpenAI.UnitTests\Connectors.AzureOpenAI.UnitTests.csproj", "{8CF06B22-50F3-4F71-A002-622DB49DF0F5}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -819,6 +825,24 @@ Global
{385A8FE5-87E2-4458-AE09-35E10BD2E67F}.Publish|Any CPU.Build.0 = Debug|Any CPU
{385A8FE5-87E2-4458-AE09-35E10BD2E67F}.Release|Any CPU.ActiveCfg = Release|Any CPU
{385A8FE5-87E2-4458-AE09-35E10BD2E67F}.Release|Any CPU.Build.0 = Release|Any CPU
+ {36DDC119-C030-407E-AC51-A877E9E0F660}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {36DDC119-C030-407E-AC51-A877E9E0F660}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {36DDC119-C030-407E-AC51-A877E9E0F660}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
+ {36DDC119-C030-407E-AC51-A877E9E0F660}.Publish|Any CPU.Build.0 = Debug|Any CPU
+ {36DDC119-C030-407E-AC51-A877E9E0F660}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {36DDC119-C030-407E-AC51-A877E9E0F660}.Release|Any CPU.Build.0 = Release|Any CPU
+ {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Publish|Any CPU.ActiveCfg = Publish|Any CPU
+ {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Publish|Any CPU.Build.0 = Publish|Any CPU
+ {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0}.Release|Any CPU.Build.0 = Release|Any CPU
+ {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Publish|Any CPU.ActiveCfg = Debug|Any CPU
+ {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Publish|Any CPU.Build.0 = Debug|Any CPU
+ {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {8CF06B22-50F3-4F71-A002-622DB49DF0F5}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -931,6 +955,9 @@ Global
{2918478E-BC86-4D53-9D01-9C318F80C14F} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C}
{E06818E3-00A5-41AC-97ED-9491070CDEA1} = {5D4C0700-BBB5-418F-A7B2-F392B9A18263}
{385A8FE5-87E2-4458-AE09-35E10BD2E67F} = {0247C2C9-86C3-45BA-8873-28B0948EDC0C}
+ {36DDC119-C030-407E-AC51-A877E9E0F660} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1}
+ {7AAD7388-307D-41FB-B80A-EF9E3A4E31F0} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1}
+ {8CF06B22-50F3-4F71-A002-622DB49DF0F5} = {1B4CBDE0-10C2-4E7D-9CD0-FE7586C96ED1}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {FBDC56A3-86AD-4323-AA0F-201E59123B83}
diff --git a/dotnet/docs/OPENAI-CONNECTOR-MIGRATION.md b/dotnet/docs/OPENAI-CONNECTOR-MIGRATION.md
new file mode 100644
index 000000000000..784de5347fb0
--- /dev/null
+++ b/dotnet/docs/OPENAI-CONNECTOR-MIGRATION.md
@@ -0,0 +1,196 @@
+# OpenAI Connector Migration Guide
+
+This manual prepares you for the migration of your code from the legacy OpenAI Connector to the new OpenAI Connector. The new OpenAI Connector is a complete rewrite of the existing OpenAI Connector and is designed to be more efficient, reliable, and scalable. This manual will guide you through the migration process and help you understand the changes that have been made to the OpenAI Connector.
+
+## 1. Package Setup when Using Azure
+
+If you are working with Azure OpenAI APIs, you will need to change the package from `Microsoft.SemanticKernel.Connectors.OpenAI` to `Microsoft.SemanticKernel.Connectors.AzureOpenAI`.
+
+> [!IMPORTANT]
+> The `Microsoft.SemanticKernel.Connectors.AzureOpenAI` package depends on the `Microsoft.SemanticKernel.Connectors.OpenAI` package so there's no need to add both to your project when using `OpenAI` related types.
+
+```diff
+- // Before
+- using Microsoft.SemanticKernel.Connectors.OpenAI;
++ // After
++ using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+```
+
+### 1.1 AzureOpenAIClient
+
+When using Azure with OpenAI, where you were previously using the `OpenAIClient` type you will need to update your code to use the new `AzureOpenAIClient` type.
+
+### 1.2 Services
+
+All services below now belong to the `Microsoft.SemanticKernel.Connectors.AzureOpenAI` namespace.
+
+- `AzureOpenAIAudioToTextService`
+- `AzureOpenAIChatCompletionService`
+- `AzureOpenAITextEmbeddingGenerationService`
+- `AzureOpenAITextToAudioService`
+- `AzureOpenAITextToImageService`
+
+## 2. Text Generation Deprecated
+
+The latest `OpenAI` SDK does not support the text generation modality, so when migrating to the underlying SDK we had to drop support and remove the `TextGeneration`-specific services; the existing `ChatCompletion` services still support text generation (they implement `ITextGenerationService`).
+
+If you were using any of the `OpenAITextGenerationService` or `AzureOpenAITextGenerationService` you will need to update your code to target a chat completion model instead, using `OpenAIChatCompletionService` or `AzureOpenAIChatCompletionService`.
+
+> [!NOTE]
+> OpenAI and AzureOpenAI `ChatCompletion` services also implement the `ITextGenerationService` interface and that may not require any changes to your code if you were targeting the `ITextGenerationService` interface.
+
+tags:
+`OpenAITextGenerationService`,`AzureOpenAITextGenerationService`,
+`AddOpenAITextGeneration`,`AddAzureOpenAITextGeneration`
+
+## 3. ChatCompletion Multiple Choices Deprecated
+
+The latest `OpenAI` SDK does not support multiple choices, so when migrating to the underlying SDK we had to drop the support and removed `ResultsPerPrompt` from the `OpenAIPromptExecutionSettings`.
+
+tags: `ResultsPerPrompt`,`results_per_prompt`
+
+## 4. OpenAI File Service Deprecation
+
+The `OpenAIFileService` was deprecated in the latest version of the OpenAI Connector. We strongly recommend updating your code to use the new `OpenAIClient.GetFileClient()` for file management operations.
+
+## 5. SemanticKernel MetaPackage
+
+To be backward compatible with the new OpenAI and AzureOpenAI Connectors, our `Microsoft.SemanticKernel` meta package changed its dependency to use the new `Microsoft.SemanticKernel.Connectors.AzureOpenAI` package, which depends on the `Microsoft.SemanticKernel.Connectors.OpenAI` package. This way, if you are using the metapackage, no change is needed to get access to `Azure` related types.
+
+## 6. Contents
+
+### 6.1 OpenAIChatMessageContent
+
+- The `Tools` property type has changed from `IReadOnlyList<ChatCompletionsToolCall>` to `IReadOnlyList<ChatToolCall>`.
+
+- Inner content type has changed from `ChatCompletionsFunctionToolCall` to `ChatToolCall`.
+
+- Metadata type `FunctionToolCalls` has changed from `IEnumerable<ChatCompletionsFunctionToolCall>` to `IEnumerable<ChatToolCall>`.
+
+### 6.2 OpenAIStreamingChatMessageContent
+
+- The `FinishReason` property type has changed from `CompletionsFinishReason` to `FinishReason`.
+- The `ToolCallUpdate` property has been renamed to `ToolCallUpdates` and its type has changed from `StreamingToolCallUpdate?` to `IReadOnlyList<StreamingChatToolCallUpdate>?`.
+- The `AuthorName` property is not initialized because it's not provided by the underlying library anymore.
+
+### 6.3 Metrics for AzureOpenAI Connector
+
+The meter `s_meter = new("Microsoft.SemanticKernel.Connectors.OpenAI");` and the relevant counters still have old names that contain "openai" in them, such as:
+
+- `semantic_kernel.connectors.openai.tokens.prompt`
+- `semantic_kernel.connectors.openai.tokens.completion`
+- `semantic_kernel.connectors.openai.tokens.total`
+
+## 7. Using Azure with your data (Data Sources)
+
+With the new `AzureOpenAIClient`, you can now specify your data source through the options, and that requires a small change in your code to the new type.
+
+Before
+
+```csharp
+var promptExecutionSettings = new OpenAIPromptExecutionSettings
+{
+ AzureChatExtensionsOptions = new AzureChatExtensionsOptions
+ {
+ Extensions = [ new AzureSearchChatExtensionConfiguration
+ {
+ SearchEndpoint = new Uri(TestConfiguration.AzureAISearch.Endpoint),
+ Authentication = new OnYourDataApiKeyAuthenticationOptions(TestConfiguration.AzureAISearch.ApiKey),
+ IndexName = TestConfiguration.AzureAISearch.IndexName
+ }]
+ };
+};
+```
+
+After
+
+```csharp
+var promptExecutionSettings = new AzureOpenAIPromptExecutionSettings
+{
+ AzureChatDataSource = new AzureSearchChatDataSource
+ {
+ Endpoint = new Uri(TestConfiguration.AzureAISearch.Endpoint),
+ Authentication = DataSourceAuthentication.FromApiKey(TestConfiguration.AzureAISearch.ApiKey),
+ IndexName = TestConfiguration.AzureAISearch.IndexName
+ }
+};
+```
+
+## 8. Breaking glass scenarios
+
+Breaking glass scenarios are scenarios where you may need to update your code to use the new OpenAI Connector. Below are some of the breaking changes that you may need to be aware of.
+
+#### 8.1 KernelContent Metadata
+
+Some of the keys in the content metadata dictionary have changed; you will need to update your code if you were using the previous key names.
+
+- `Created` -> `CreatedAt`
+
+#### 8.2 Prompt Filter Results
+
+The `PromptFilterResults` metadata type has changed from `IReadOnlyList<ContentFilterResultsForPrompt>` to `ContentFilterResultForPrompt`.
+
+#### 8.3 Content Filter Results
+
+The `ContentFilterResultsForPrompt` type has changed from `ContentFilterResultsForChoice` to `ContentFilterResultForResponse`.
+
+#### 8.4 Finish Reason
+
+The FinishReason metadata string value has changed from `stop` to `Stop`
+
+#### 8.5 Tool Calls
+
+The ToolCalls metadata string value has changed from `tool_calls` to `ToolCalls`
+
+#### 8.6 LogProbs / Log Probability Info
+
+The `LogProbabilityInfo` type has changed from `ChatChoiceLogProbabilityInfo` to `IReadOnlyList<ChatTokenLogProbabilityInfo>`.
+
+#### 8.7 Finish Details, Index, and Enhancements
+
+All of the above have been removed.
+
+#### 8.8 Token Usage
+
+The Token usage naming convention from `OpenAI` changed from `Completion`, `Prompt` tokens to `Output` and `Input` respectively. You will need to update your code to use the new naming.
+
+The type also changed from `CompletionsUsage` to `ChatTokenUsage`.
+
+[Example of Token Usage Metadata Changes](https://github.com/microsoft/semantic-kernel/pull/7151/files#diff-a323107b9f8dc8559a83e50080c6e34551ddf6d9d770197a473f249589e8fb47)
+
+```diff
+- Before
+- var usage = FunctionResult.Metadata?["Usage"] as CompletionsUsage;
+- var completionTokens = usage?.CompletionTokens ?? 0;
+- var promptTokens = usage?.PromptTokens ?? 0;
+
++ After
++ var usage = FunctionResult.Metadata?["Usage"] as ChatTokenUsage;
++ var promptTokens = usage?.InputTokens ?? 0;
++ var completionTokens = usage?.OutputTokens ?? 0;
+
++ var totalTokens = usage?.TotalTokens ?? 0;
+```
+
+#### 8.9 OpenAIClient
+
+The `OpenAIClient` type was previously an Azure-specific namespace type, but now it is an `OpenAI` SDK namespace type; you will need to update your code to use the new `OpenAIClient` type.
+
+When using Azure, you will need to update your code to use the new `AzureOpenAIClient` type.
+
+#### 8.10 Pipeline Configuration
+
+The new `OpenAI` SDK uses a different pipeline configuration, and has a dependency on `System.ClientModel` package. You will need to update your code to use the new `HttpClientPipelineTransport` transport configuration where before you were using `HttpClientTransport` from `Azure.Core.Pipeline`.
+
+[Example of Pipeline Configuration](https://github.com/microsoft/semantic-kernel/pull/7151/files#diff-fab02d9a75bf43cb57f71dddc920c3f72882acf83fb125d8cad963a643d26eb3)
+
+```diff
+var clientOptions = new OpenAIClientOptions
+{
+- // Before: From Azure.Core.Pipeline
+- Transport = new HttpClientTransport(httpClient),
+
++ // After: From OpenAI SDK -> System.ClientModel
++ Transport = new HttpClientPipelineTransport(httpClient),
+};
+```
diff --git a/dotnet/nuget/nuget-package.props b/dotnet/nuget/nuget-package.props
index 78c6592ad176..851e17bc86f9 100644
--- a/dotnet/nuget/nuget-package.props
+++ b/dotnet/nuget/nuget-package.props
@@ -1,8 +1,7 @@
- 1.17.2
-
+ 1.18.2
$(VersionPrefix)-$(VersionSuffix)
$(VersionPrefix)
@@ -10,7 +9,7 @@
true
- 1.17.2
+ 1.18.0-rc
$(NoWarn);CP0003
diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs
index 16c019aebbfd..d0b8e92d39d7 100644
--- a/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs
+++ b/dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs
@@ -12,7 +12,7 @@ namespace Agents;
/// Demonstrate usage of for both direction invocation
/// of and via .
///
-public class ChatCompletion_FunctionTermination(ITestOutputHelper output) : BaseTest(output)
+public class ChatCompletion_FunctionTermination(ITestOutputHelper output) : BaseAgentsTest(output)
{
[Fact]
public async Task UseAutoFunctionInvocationFilterWithAgentInvocationAsync()
@@ -44,25 +44,25 @@ public async Task UseAutoFunctionInvocationFilterWithAgentInvocationAsync()
Console.WriteLine("================================");
foreach (ChatMessageContent message in chat)
{
- this.WriteContent(message);
+ this.WriteAgentChatMessage(message);
}
// Local function to invoke agent and display the conversation messages.
async Task InvokeAgentAsync(string input)
{
- ChatMessageContent userContent = new(AuthorRole.User, input);
- chat.Add(userContent);
- this.WriteContent(userContent);
+ ChatMessageContent message = new(AuthorRole.User, input);
+ chat.Add(message);
+ this.WriteAgentChatMessage(message);
- await foreach (ChatMessageContent content in agent.InvokeAsync(chat))
+ await foreach (ChatMessageContent response in agent.InvokeAsync(chat))
{
// Do not add a message implicitly added to the history.
- if (!content.Items.Any(i => i is FunctionCallContent || i is FunctionResultContent))
+ if (!response.Items.Any(i => i is FunctionCallContent || i is FunctionResultContent))
{
- chat.Add(content);
+ chat.Add(response);
}
- this.WriteContent(content);
+ this.WriteAgentChatMessage(response);
}
}
}
@@ -98,28 +98,23 @@ public async Task UseAutoFunctionInvocationFilterWithAgentChatAsync()
ChatMessageContent[] history = await chat.GetChatMessagesAsync().ToArrayAsync();
for (int index = history.Length; index > 0; --index)
{
- this.WriteContent(history[index - 1]);
+ this.WriteAgentChatMessage(history[index - 1]);
}
// Local function to invoke agent and display the conversation messages.
async Task InvokeAgentAsync(string input)
{
- ChatMessageContent userContent = new(AuthorRole.User, input);
- chat.AddChatMessage(userContent);
- this.WriteContent(userContent);
+ ChatMessageContent message = new(AuthorRole.User, input);
+ chat.AddChatMessage(message);
+ this.WriteAgentChatMessage(message);
- await foreach (ChatMessageContent content in chat.InvokeAsync(agent))
+ await foreach (ChatMessageContent response in chat.InvokeAsync(agent))
{
- this.WriteContent(content);
+ this.WriteAgentChatMessage(response);
}
}
}
- private void WriteContent(ChatMessageContent content)
- {
- Console.WriteLine($"[{content.Items.LastOrDefault()?.GetType().Name ?? "(empty)"}] {content.Role} : '{content.Content}'");
- }
-
private Kernel CreateKernelWithFilter()
{
IKernelBuilder builder = Kernel.CreateBuilder();
diff --git a/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs b/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs
index d3e94386af96..575db7f7f288 100644
--- a/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs
+++ b/dotnet/samples/Concepts/Agents/ChatCompletion_Streaming.cs
@@ -12,7 +12,7 @@ namespace Agents;
/// Demonstrate creation of and
/// eliciting its response to three explicit user messages.
///
-public class ChatCompletion_Streaming(ITestOutputHelper output) : BaseTest(output)
+public class ChatCompletion_Streaming(ITestOutputHelper output) : BaseAgentsTest(output)
{
private const string ParrotName = "Parrot";
private const string ParrotInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound.";
@@ -66,32 +66,33 @@ public async Task UseStreamingChatCompletionAgentWithPluginAsync()
// Local function to invoke agent and display the conversation messages.
private async Task InvokeAgentAsync(ChatCompletionAgent agent, ChatHistory chat, string input)
{
- chat.Add(new ChatMessageContent(AuthorRole.User, input));
-
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
+ ChatMessageContent message = new(AuthorRole.User, input);
+ chat.Add(message);
+ this.WriteAgentChatMessage(message);
StringBuilder builder = new();
- await foreach (StreamingChatMessageContent message in agent.InvokeStreamingAsync(chat))
+ await foreach (StreamingChatMessageContent response in agent.InvokeStreamingAsync(chat))
{
- if (string.IsNullOrEmpty(message.Content))
+ if (string.IsNullOrEmpty(response.Content))
{
continue;
}
if (builder.Length == 0)
{
- Console.WriteLine($"# {message.Role} - {message.AuthorName ?? "*"}:");
+ Console.WriteLine($"# {response.Role} - {response.AuthorName ?? "*"}:");
}
- Console.WriteLine($"\t > streamed: '{message.Content}'");
- builder.Append(message.Content);
+ Console.WriteLine($"\t > streamed: '{response.Content}'");
+ builder.Append(response.Content);
}
if (builder.Length > 0)
{
// Display full response and capture in chat history
- Console.WriteLine($"\t > complete: '{builder}'");
- chat.Add(new ChatMessageContent(AuthorRole.Assistant, builder.ToString()) { AuthorName = agent.Name });
+ ChatMessageContent response = new(AuthorRole.Assistant, builder.ToString()) { AuthorName = agent.Name };
+ chat.Add(response);
+ this.WriteAgentChatMessage(response);
}
}
diff --git a/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs b/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs
index aae984906ba3..0d7b27917d78 100644
--- a/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs
+++ b/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs
@@ -1,10 +1,10 @@
// Copyright (c) Microsoft. All rights reserved.
-using Azure.AI.OpenAI;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.Chat;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
+using OpenAI.Chat;
using Resources;
namespace Agents;
@@ -13,10 +13,8 @@ namespace Agents;
/// Demonstrate usage of and
/// to manage execution.
///
-public class ComplexChat_NestedShopper(ITestOutputHelper output) : BaseTest(output)
+public class ComplexChat_NestedShopper(ITestOutputHelper output) : BaseAgentsTest(output)
{
- protected override bool ForceOpenAI => true;
-
private const string InternalLeaderName = "InternalLeader";
private const string InternalLeaderInstructions =
"""
@@ -98,7 +96,7 @@ public async Task NestedChatWithAggregatorAgentAsync()
{
Console.WriteLine($"! {Model}");
- OpenAIPromptExecutionSettings jsonSettings = new() { ResponseFormat = ChatCompletionsResponseFormat.JsonObject };
+ OpenAIPromptExecutionSettings jsonSettings = new() { ResponseFormat = ChatResponseFormat.JsonObject };
OpenAIPromptExecutionSettings autoInvokeSettings = new() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
ChatCompletionAgent internalLeaderAgent = CreateAgent(InternalLeaderName, InternalLeaderInstructions);
@@ -154,20 +152,20 @@ public async Task NestedChatWithAggregatorAgentAsync()
Console.WriteLine(">>>> AGGREGATED CHAT");
Console.WriteLine(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>");
- await foreach (ChatMessageContent content in chat.GetChatMessagesAsync(personalShopperAgent).Reverse())
+ await foreach (ChatMessageContent message in chat.GetChatMessagesAsync(personalShopperAgent).Reverse())
{
- Console.WriteLine($">>>> {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
+ this.WriteAgentChatMessage(message);
}
async Task InvokeChatAsync(string input)
{
- chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
-
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
+ ChatMessageContent message = new(AuthorRole.User, input);
+ chat.AddChatMessage(message);
+ this.WriteAgentChatMessage(message);
- await foreach (ChatMessageContent content in chat.InvokeAsync(personalShopperAgent))
+ await foreach (ChatMessageContent response in chat.InvokeAsync(personalShopperAgent))
{
- Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
+ this.WriteAgentChatMessage(response);
}
Console.WriteLine($"\n# IS COMPLETE: {chat.IsComplete}");
diff --git a/dotnet/samples/Concepts/Agents/Legacy_AgentAuthoring.cs b/dotnet/samples/Concepts/Agents/Legacy_AgentAuthoring.cs
index 062262fe8a8c..53276c75a24d 100644
--- a/dotnet/samples/Concepts/Agents/Legacy_AgentAuthoring.cs
+++ b/dotnet/samples/Concepts/Agents/Legacy_AgentAuthoring.cs
@@ -9,12 +9,6 @@ namespace Agents;
///
public class Legacy_AgentAuthoring(ITestOutputHelper output) : BaseTest(output)
{
- ///
- /// Specific model is required that supports agents and parallel function calling.
- /// Currently this is limited to Open AI hosted services.
- ///
- private const string OpenAIFunctionEnabledModel = "gpt-4-1106-preview";
-
// Track agents for clean-up
private static readonly List s_agents = [];
@@ -72,7 +66,7 @@ private static async Task CreateArticleGeneratorAsync()
return
Track(
await new AgentBuilder()
- .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey)
+ .WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey)
.WithInstructions("You write concise opinionated articles that are published online. Use an outline to generate an article with one section of prose for each top-level outline element. Each section is based on research with a maximum of 120 words.")
.WithName("Article Author")
.WithDescription("Author an article on a given topic.")
@@ -87,7 +81,7 @@ private static async Task CreateOutlineGeneratorAsync()
return
Track(
await new AgentBuilder()
- .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey)
+ .WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey)
.WithInstructions("Produce an single-level outline (no child elements) based on the given topic with at most 3 sections.")
.WithName("Outline Generator")
.WithDescription("Generate an outline.")
@@ -100,7 +94,7 @@ private static async Task CreateResearchGeneratorAsync()
return
Track(
await new AgentBuilder()
- .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey)
+ .WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey)
.WithInstructions("Provide insightful research that supports the given topic based on your knowledge of the outline topic.")
.WithName("Researcher")
.WithDescription("Author research summary.")
diff --git a/dotnet/samples/Concepts/Agents/Legacy_AgentCharts.cs b/dotnet/samples/Concepts/Agents/Legacy_AgentCharts.cs
index 877ba0971710..d40755101309 100644
--- a/dotnet/samples/Concepts/Agents/Legacy_AgentCharts.cs
+++ b/dotnet/samples/Concepts/Agents/Legacy_AgentCharts.cs
@@ -1,8 +1,10 @@
// Copyright (c) Microsoft. All rights reserved.
using System.Diagnostics;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Azure.AI.OpenAI;
using Microsoft.SemanticKernel.Experimental.Agents;
+using OpenAI;
+using OpenAI.Files;
namespace Agents;
@@ -12,28 +14,15 @@ namespace Agents;
///
public sealed class Legacy_AgentCharts(ITestOutputHelper output) : BaseTest(output)
{
- ///
- /// Specific model is required that supports agents and parallel function calling.
- /// Currently this is limited to Open AI hosted services.
- ///
- private const string OpenAIFunctionEnabledModel = "gpt-4-1106-preview";
-
- ///
- /// Flag to force usage of OpenAI configuration if both
- /// and are defined.
- /// If 'false', Azure takes precedence.
- ///
- private new const bool ForceOpenAI = false;
-
///
/// Create a chart and retrieve by file_id.
///
- [Fact(Skip = "Launches external processes")]
+ [Fact]
public async Task CreateChartAsync()
{
Console.WriteLine("======== Using CodeInterpreter tool ========");
- var fileService = CreateFileService();
+ FileClient fileClient = CreateFileClient();
var agent = await CreateAgentBuilder().WithCodeInterpreter().BuildAsync();
@@ -69,11 +58,11 @@ async Task InvokeAgentAsync(IAgentThread thread, string imageName, string questi
{
var filename = $"{imageName}.jpg";
var path = Path.Combine(Environment.CurrentDirectory, filename);
- Console.WriteLine($"# {message.Role}: {message.Content}");
+ var fileId = message.Content;
+ Console.WriteLine($"# {message.Role}: {fileId}");
Console.WriteLine($"# {message.Role}: {path}");
- var content = await fileService.GetFileContentAsync(message.Content);
- await using var outputStream = File.OpenWrite(filename);
- await outputStream.WriteAsync(content.Data!.Value);
+ BinaryData content = await fileClient.DownloadFileAsync(fileId);
+ File.WriteAllBytes(filename, content.ToArray());
Process.Start(
new ProcessStartInfo
{
@@ -91,19 +80,23 @@ async Task InvokeAgentAsync(IAgentThread thread, string imageName, string questi
}
}
- private static OpenAIFileService CreateFileService()
+ private FileClient CreateFileClient()
+
{
- return
- ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ?
- new OpenAIFileService(TestConfiguration.OpenAI.ApiKey) :
- new OpenAIFileService(new Uri(TestConfiguration.AzureOpenAI.Endpoint), apiKey: TestConfiguration.AzureOpenAI.ApiKey);
+ OpenAIClient client =
+ this.ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ?
+ new OpenAIClient(TestConfiguration.OpenAI.ApiKey) :
+ new AzureOpenAIClient(new Uri(TestConfiguration.AzureOpenAI.Endpoint), TestConfiguration.AzureOpenAI.ApiKey);
+
+ return client.GetFileClient();
}
+#pragma warning restore CS0618 // Type or member is obsolete
- private static AgentBuilder CreateAgentBuilder()
+ private AgentBuilder CreateAgentBuilder()
{
return
- ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ?
- new AgentBuilder().WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) :
+ this.ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ?
+ new AgentBuilder().WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) :
new AgentBuilder().WithAzureOpenAIChatCompletion(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ChatDeploymentName, TestConfiguration.AzureOpenAI.ApiKey);
}
}
diff --git a/dotnet/samples/Concepts/Agents/Legacy_AgentCollaboration.cs b/dotnet/samples/Concepts/Agents/Legacy_AgentCollaboration.cs
index 53ae0c07662a..fa257d2764b3 100644
--- a/dotnet/samples/Concepts/Agents/Legacy_AgentCollaboration.cs
+++ b/dotnet/samples/Concepts/Agents/Legacy_AgentCollaboration.cs
@@ -9,17 +9,6 @@ namespace Agents;
///
public class Legacy_AgentCollaboration(ITestOutputHelper output) : BaseTest(output)
{
- ///
- /// Specific model is required that supports agents and function calling.
- /// Currently this is limited to Open AI hosted services.
- ///
- private const string OpenAIFunctionEnabledModel = "gpt-4-turbo-preview";
-
- ///
- /// Set this to 'true' to target OpenAI instead of Azure OpenAI.
- ///
- private const bool UseOpenAI = false;
-
// Track agents for clean-up
private static readonly List s_agents = [];
@@ -29,8 +18,6 @@ public class Legacy_AgentCollaboration(ITestOutputHelper output) : BaseTest(outp
[Fact(Skip = "This test take more than 5 minutes to execute")]
public async Task RunCollaborationAsync()
{
- Console.WriteLine($"======== Example72:Collaboration:{(UseOpenAI ? "OpenAI" : "AzureAI")} ========");
-
IAgentThread? thread = null;
try
{
@@ -82,8 +69,6 @@ public async Task RunCollaborationAsync()
[Fact(Skip = "This test take more than 2 minutes to execute")]
public async Task RunAsPluginsAsync()
{
- Console.WriteLine($"======== Example72:AsPlugins:{(UseOpenAI ? "OpenAI" : "AzureAI")} ========");
-
try
{
// Create copy-writer agent to generate ideas
@@ -113,7 +98,7 @@ await CreateAgentBuilder()
}
}
- private static async Task CreateCopyWriterAsync(IAgent? agent = null)
+ private async Task CreateCopyWriterAsync(IAgent? agent = null)
{
return
Track(
@@ -125,7 +110,7 @@ await CreateAgentBuilder()
.BuildAsync());
}
- private static async Task CreateArtDirectorAsync()
+ private async Task CreateArtDirectorAsync()
{
return
Track(
@@ -136,13 +121,13 @@ await CreateAgentBuilder()
.BuildAsync());
}
- private static AgentBuilder CreateAgentBuilder()
+ private AgentBuilder CreateAgentBuilder()
{
var builder = new AgentBuilder();
return
- UseOpenAI ?
- builder.WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) :
+ this.ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ?
+ builder.WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) :
builder.WithAzureOpenAIChatCompletion(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ChatDeploymentName, TestConfiguration.AzureOpenAI.ApiKey);
}
diff --git a/dotnet/samples/Concepts/Agents/Legacy_AgentDelegation.cs b/dotnet/samples/Concepts/Agents/Legacy_AgentDelegation.cs
index 86dacb9c256d..b4b0ed93199f 100644
--- a/dotnet/samples/Concepts/Agents/Legacy_AgentDelegation.cs
+++ b/dotnet/samples/Concepts/Agents/Legacy_AgentDelegation.cs
@@ -12,12 +12,6 @@ namespace Agents;
///
public class Legacy_AgentDelegation(ITestOutputHelper output) : BaseTest(output)
{
- ///
- /// Specific model is required that supports agents and function calling.
- /// Currently this is limited to Open AI hosted services.
- ///
- private const string OpenAIFunctionEnabledModel = "gpt-3.5-turbo-1106";
-
// Track agents for clean-up
private static readonly List s_agents = [];
@@ -27,8 +21,6 @@ public class Legacy_AgentDelegation(ITestOutputHelper output) : BaseTest(output)
[Fact]
public async Task RunAsync()
{
- Console.WriteLine("======== Example71_AgentDelegation ========");
-
if (TestConfiguration.OpenAI.ApiKey is null)
{
Console.WriteLine("OpenAI apiKey not found. Skipping example.");
@@ -39,11 +31,11 @@ public async Task RunAsync()
try
{
- var plugin = KernelPluginFactory.CreateFromType();
+ var plugin = KernelPluginFactory.CreateFromType();
var menuAgent =
Track(
await new AgentBuilder()
- .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey)
+ .WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey)
.FromTemplate(EmbeddedResource.Read("Agents.ToolAgent.yaml"))
.WithDescription("Answer questions about how the menu uses the tool.")
.WithPlugin(plugin)
@@ -52,14 +44,14 @@ public async Task RunAsync()
var parrotAgent =
Track(
await new AgentBuilder()
- .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey)
+ .WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey)
.FromTemplate(EmbeddedResource.Read("Agents.ParrotAgent.yaml"))
.BuildAsync());
var toolAgent =
Track(
await new AgentBuilder()
- .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey)
+ .WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey)
.FromTemplate(EmbeddedResource.Read("Agents.ToolAgent.yaml"))
.WithPlugin(parrotAgent.AsPlugin())
.WithPlugin(menuAgent.AsPlugin())
diff --git a/dotnet/samples/Concepts/Agents/Legacy_AgentTools.cs b/dotnet/samples/Concepts/Agents/Legacy_AgentTools.cs
index 66d93ecc88d9..00af8faab617 100644
--- a/dotnet/samples/Concepts/Agents/Legacy_AgentTools.cs
+++ b/dotnet/samples/Concepts/Agents/Legacy_AgentTools.cs
@@ -1,8 +1,9 @@
// Copyright (c) Microsoft. All rights reserved.
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Azure.AI.OpenAI;
using Microsoft.SemanticKernel.Experimental.Agents;
+using OpenAI;
+using OpenAI.Files;
using Resources;
namespace Agents;
@@ -13,21 +14,8 @@ namespace Agents;
///
public sealed class Legacy_AgentTools(ITestOutputHelper output) : BaseTest(output)
{
- ///
- /// Specific model is required that supports agents and parallel function calling.
- /// Currently this is limited to Open AI hosted services.
- ///
- private const string OpenAIFunctionEnabledModel = "gpt-4-1106-preview";
-
- ///
- /// Flag to force usage of OpenAI configuration if both
- /// and are defined.
- /// If 'false', Azure takes precedence.
- ///
- ///
- /// NOTE: Retrieval tools is not currently available on Azure.
- ///
- private new const bool ForceOpenAI = true;
+ ///
+ protected override bool ForceOpenAI => true;
// Track agents for clean-up
private readonly List _agents = [];
@@ -79,12 +67,13 @@ public async Task RunRetrievalToolAsync()
return;
}
- Kernel kernel = CreateFileEnabledKernel();
- var fileService = kernel.GetRequiredService();
- var result =
- await fileService.UploadContentAsync(
- new BinaryContent(await EmbeddedResource.ReadAllAsync("travelinfo.txt")!, "text/plain"),
- new OpenAIFileUploadExecutionSettings("travelinfo.txt", OpenAIFilePurpose.Assistants));
+ FileClient fileClient = CreateFileClient();
+
+ OpenAIFileInfo result =
+ await fileClient.UploadFileAsync(
+ new BinaryData(await EmbeddedResource.ReadAllAsync("travelinfo.txt")!),
+ "travelinfo.txt",
+ FileUploadPurpose.Assistants);
var fileId = result.Id;
Console.WriteLine($"! {fileId}");
@@ -110,7 +99,7 @@ await ChatAsync(
}
finally
{
- await Task.WhenAll(this._agents.Select(a => a.DeleteAsync()).Append(fileService.DeleteFileAsync(fileId)));
+ await Task.WhenAll(this._agents.Select(a => a.DeleteAsync()).Append(fileClient.DeleteFileAsync(fileId)));
}
}
@@ -165,19 +154,21 @@ async Task InvokeAgentAsync(IAgent agent, string question)
}
}
- private static Kernel CreateFileEnabledKernel()
+ private FileClient CreateFileClient()
{
- return
- ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ?
- Kernel.CreateBuilder().AddOpenAIFiles(TestConfiguration.OpenAI.ApiKey).Build() :
- Kernel.CreateBuilder().AddAzureOpenAIFiles(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ApiKey).Build();
+ OpenAIClient client =
+ this.ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ?
+ new OpenAIClient(TestConfiguration.OpenAI.ApiKey) :
+ new AzureOpenAIClient(new Uri(TestConfiguration.AzureOpenAI.Endpoint), TestConfiguration.AzureOpenAI.ApiKey);
+
+ return client.GetFileClient();
}
- private static AgentBuilder CreateAgentBuilder()
+ private AgentBuilder CreateAgentBuilder()
{
return
- ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ?
- new AgentBuilder().WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) :
+ this.ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ?
+ new AgentBuilder().WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) :
new AgentBuilder().WithAzureOpenAIChatCompletion(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ChatDeploymentName, TestConfiguration.AzureOpenAI.ApiKey);
}
diff --git a/dotnet/samples/Concepts/Agents/Legacy_Agents.cs b/dotnet/samples/Concepts/Agents/Legacy_Agents.cs
index 5af10987bb3a..31cc4926392b 100644
--- a/dotnet/samples/Concepts/Agents/Legacy_Agents.cs
+++ b/dotnet/samples/Concepts/Agents/Legacy_Agents.cs
@@ -13,19 +13,6 @@ namespace Agents;
///
public class Legacy_Agents(ITestOutputHelper output) : BaseTest(output)
{
- ///
- /// Specific model is required that supports agents and function calling.
- /// Currently this is limited to Open AI hosted services.
- ///
- private const string OpenAIFunctionEnabledModel = "gpt-3.5-turbo-1106";
-
- ///
- /// Flag to force usage of OpenAI configuration if both
- /// and are defined.
- /// If 'false', Azure takes precedence.
- ///
- private new const bool ForceOpenAI = false;
-
///
/// Chat using the "Parrot" agent.
/// Tools/functions: None
@@ -61,18 +48,12 @@ public async Task RunWithMethodFunctionsAsync()
await ChatAsync(
"Agents.ToolAgent.yaml", // Defined under ./Resources/Agents
plugin,
- arguments: new() { { LegacyMenuPlugin.CorrelationIdArgument, 3.141592653 } },
+ arguments: null,
"Hello",
"What is the special soup?",
"What is the special drink?",
"Do you have enough soup for 5 orders?",
"Thank you!");
-
- Console.WriteLine("\nCorrelation Ids:");
- foreach (string correlationId in menuApi.CorrelationIds)
- {
- Console.WriteLine($"- {correlationId}");
- }
}
///
@@ -114,7 +95,7 @@ public async Task RunAsFunctionAsync()
// Create parrot agent, same as the other cases.
var agent =
await new AgentBuilder()
- .WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey)
+ .WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey)
.FromTemplate(EmbeddedResource.Read("Agents.ParrotAgent.yaml"))
.BuildAsync();
@@ -187,11 +168,11 @@ await Task.WhenAll(
}
}
- private static AgentBuilder CreateAgentBuilder()
+ private AgentBuilder CreateAgentBuilder()
{
return
- ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ?
- new AgentBuilder().WithOpenAIChatCompletion(OpenAIFunctionEnabledModel, TestConfiguration.OpenAI.ApiKey) :
+ this.ForceOpenAI || string.IsNullOrEmpty(TestConfiguration.AzureOpenAI.Endpoint) ?
+ new AgentBuilder().WithOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey) :
new AgentBuilder().WithAzureOpenAIChatCompletion(TestConfiguration.AzureOpenAI.Endpoint, TestConfiguration.AzureOpenAI.ChatDeploymentName, TestConfiguration.AzureOpenAI.ApiKey);
}
}
diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs b/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs
index d3a894dd6c8e..21b19c1d342c 100644
--- a/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs
+++ b/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs
@@ -10,7 +10,7 @@ namespace Agents;
/// Demonstrate that two different agent types are able to participate in the same conversation.
/// In this case a and participate.
///
-public class MixedChat_Agents(ITestOutputHelper output) : BaseTest(output)
+public class MixedChat_Agents(ITestOutputHelper output) : BaseAgentsTest(output)
{
private const string ReviewerName = "ArtDirector";
private const string ReviewerInstructions =
@@ -47,12 +47,12 @@ public async Task ChatWithOpenAIAssistantAgentAndChatCompletionAgentAsync()
OpenAIAssistantAgent agentWriter =
await OpenAIAssistantAgent.CreateAsync(
kernel: new(),
- config: new(this.ApiKey, this.Endpoint),
- definition: new()
+ clientProvider: this.GetClientProvider(),
+ definition: new(this.Model)
{
Instructions = CopyWriterInstructions,
Name = CopyWriterName,
- ModelId = this.Model,
+ Metadata = AssistantSampleMetadata,
});
// Create a chat for agent interaction.
@@ -76,16 +76,16 @@ await OpenAIAssistantAgent.CreateAsync(
};
// Invoke chat and display messages.
- string input = "concept: maps made out of egg cartons.";
- chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
+ ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons.");
+ chat.AddChatMessage(input);
+ this.WriteAgentChatMessage(input);
- await foreach (ChatMessageContent content in chat.InvokeAsync())
+ await foreach (ChatMessageContent response in chat.InvokeAsync())
{
- Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
+ this.WriteAgentChatMessage(response);
}
- Console.WriteLine($"# IS COMPLETE: {chat.IsComplete}");
+ Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]");
}
private sealed class ApprovalTerminationStrategy : TerminationStrategy
diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Files.cs b/dotnet/samples/Concepts/Agents/MixedChat_Files.cs
index 5d96de68da72..0219c25f7712 100644
--- a/dotnet/samples/Concepts/Agents/MixedChat_Files.cs
+++ b/dotnet/samples/Concepts/Agents/MixedChat_Files.cs
@@ -1,10 +1,9 @@
// Copyright (c) Microsoft. All rights reserved.
-using System.Text;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
+using OpenAI.Files;
using Resources;
namespace Agents;
@@ -13,24 +12,22 @@ namespace Agents;
/// Demonstrate agent interacts with
/// when it produces file output.
///
-public class MixedChat_Files(ITestOutputHelper output) : BaseTest(output)
+public class MixedChat_Files(ITestOutputHelper output) : BaseAgentsTest(output)
{
- ///
- /// Target OpenAI services.
- ///
- protected override bool ForceOpenAI => true;
-
private const string SummaryInstructions = "Summarize the entire conversation for the user in natural language.";
[Fact]
public async Task AnalyzeFileAndGenerateReportAsync()
{
- OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey);
+ OpenAIClientProvider provider = this.GetClientProvider();
+
+ FileClient fileClient = provider.Client.GetFileClient();
- OpenAIFileReference uploadFile =
- await fileService.UploadContentAsync(
- new BinaryContent(await EmbeddedResource.ReadAllAsync("30-user-context.txt"), mimeType: "text/plain"),
- new OpenAIFileUploadExecutionSettings("30-user-context.txt", OpenAIFilePurpose.Assistants));
+ OpenAIFileInfo uploadFile =
+ await fileClient.UploadFileAsync(
+ new BinaryData(await EmbeddedResource.ReadAllAsync("30-user-context.txt")),
+ "30-user-context.txt",
+ FileUploadPurpose.Assistants);
Console.WriteLine(this.ApiKey);
@@ -38,12 +35,12 @@ await fileService.UploadContentAsync(
OpenAIAssistantAgent analystAgent =
await OpenAIAssistantAgent.CreateAsync(
kernel: new(),
- config: new(this.ApiKey, this.Endpoint),
- new()
+ provider,
+ new(this.Model)
{
- EnableCodeInterpreter = true, // Enable code-interpreter
- ModelId = this.Model,
- FileIds = [uploadFile.Id] // Associate uploaded file with assistant
+ EnableCodeInterpreter = true,
+ CodeInterpreterFileIds = [uploadFile.Id], // Associate uploaded file with assistant code-interpreter
+ Metadata = AssistantSampleMetadata,
});
ChatCompletionAgent summaryAgent =
@@ -70,7 +67,7 @@ Create a tab delimited file report of the ordered (descending) frequency distrib
finally
{
await analystAgent.DeleteAsync();
- await fileService.DeleteFileAsync(uploadFile.Id);
+ await fileClient.DeleteFileAsync(uploadFile.Id);
}
// Local function to invoke agent and display the conversation messages.
@@ -78,21 +75,15 @@ async Task InvokeAgentAsync(Agent agent, string? input = null)
{
if (!string.IsNullOrWhiteSpace(input))
{
+ ChatMessageContent message = new(AuthorRole.User, input);
chat.AddChatMessage(new(AuthorRole.User, input));
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
+ this.WriteAgentChatMessage(message);
}
- await foreach (ChatMessageContent content in chat.InvokeAsync(agent))
+ await foreach (ChatMessageContent response in chat.InvokeAsync(agent))
{
- Console.WriteLine($"\n# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
-
- foreach (AnnotationContent annotation in content.Items.OfType())
- {
- Console.WriteLine($"\t* '{annotation.Quote}' => {annotation.FileId}");
- BinaryContent fileContent = await fileService.GetFileContentAsync(annotation.FileId!);
- byte[] byteContent = fileContent.Data?.ToArray() ?? [];
- Console.WriteLine($"\n{Encoding.Default.GetString(byteContent)}");
- }
+ this.WriteAgentChatMessage(response);
+ await this.DownloadResponseContentAsync(fileClient, response);
}
}
}
diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Images.cs b/dotnet/samples/Concepts/Agents/MixedChat_Images.cs
index 385577573ac6..437643e25574 100644
--- a/dotnet/samples/Concepts/Agents/MixedChat_Images.cs
+++ b/dotnet/samples/Concepts/Agents/MixedChat_Images.cs
@@ -3,7 +3,7 @@
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
+using OpenAI.Files;
namespace Agents;
@@ -11,13 +11,8 @@ namespace Agents;
/// Demonstrate agent interacts with
/// when it produces image output.
///
-public class MixedChat_Images(ITestOutputHelper output) : BaseTest(output)
+public class MixedChat_Images(ITestOutputHelper output) : BaseAgentsTest(output)
{
- ///
- /// Target OpenAI services.
- ///
- protected override bool ForceOpenAI => true;
-
private const string AnalystName = "Analyst";
private const string AnalystInstructions = "Create charts as requested without explanation.";
@@ -27,19 +22,21 @@ public class MixedChat_Images(ITestOutputHelper output) : BaseTest(output)
[Fact]
public async Task AnalyzeDataAndGenerateChartAsync()
{
- OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey);
+ OpenAIClientProvider provider = this.GetClientProvider();
+
+ FileClient fileClient = provider.Client.GetFileClient();
// Define the agents
OpenAIAssistantAgent analystAgent =
await OpenAIAssistantAgent.CreateAsync(
kernel: new(),
- config: new(this.ApiKey, this.Endpoint),
- new()
+ provider,
+ new(this.Model)
{
Instructions = AnalystInstructions,
Name = AnalystName,
EnableCodeInterpreter = true,
- ModelId = this.Model,
+ Metadata = AssistantSampleMetadata,
});
ChatCompletionAgent summaryAgent =
@@ -86,26 +83,15 @@ async Task InvokeAgentAsync(Agent agent, string? input = null)
{
if (!string.IsNullOrWhiteSpace(input))
{
- chat.AddChatMessage(new(AuthorRole.User, input));
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
+ ChatMessageContent message = new(AuthorRole.User, input);
+ chat.AddChatMessage(message);
+ this.WriteAgentChatMessage(message);
}
- await foreach (ChatMessageContent message in chat.InvokeAsync(agent))
+ await foreach (ChatMessageContent response in chat.InvokeAsync(agent))
{
- if (!string.IsNullOrWhiteSpace(message.Content))
- {
- Console.WriteLine($"\n# {message.Role} - {message.AuthorName ?? "*"}: '{message.Content}'");
- }
-
- foreach (FileReferenceContent fileReference in message.Items.OfType())
- {
- Console.WriteLine($"\t* Generated image - @{fileReference.FileId}");
- BinaryContent fileContent = await fileService.GetFileContentAsync(fileReference.FileId!);
- byte[] byteContent = fileContent.Data?.ToArray() ?? [];
- string filePath = Path.ChangeExtension(Path.GetTempFileName(), ".png");
- await File.WriteAllBytesAsync($"{filePath}.png", byteContent);
- Console.WriteLine($"\t* Local path - {filePath}");
- }
+ this.WriteAgentChatMessage(response);
+ await this.DownloadResponseImageAsync(fileClient, response);
}
}
}
diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs b/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs
index 92aa8a9ce9d4..f9afcc55b7f5 100644
--- a/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs
+++ b/dotnet/samples/Concepts/Agents/MixedChat_Reset.cs
@@ -3,14 +3,13 @@
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
namespace Agents;
///
/// Demonstrate the use of .
///
-public class MixedChat_Reset(ITestOutputHelper output) : BaseTest(output)
+public class MixedChat_Reset(ITestOutputHelper output) : BaseAgentsTest(output)
{
private const string AgentInstructions =
"""
@@ -21,18 +20,17 @@ The user may either provide information or query on information previously provi
[Fact]
public async Task ResetChatAsync()
{
- OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey);
+ OpenAIClientProvider provider = this.GetClientProvider();
// Define the agents
OpenAIAssistantAgent assistantAgent =
await OpenAIAssistantAgent.CreateAsync(
kernel: new(),
- config: new(this.ApiKey, this.Endpoint),
- new()
+ provider,
+ new(this.Model)
{
Name = nameof(OpenAIAssistantAgent),
Instructions = AgentInstructions,
- ModelId = this.Model,
});
ChatCompletionAgent chatAgent =
@@ -74,16 +72,14 @@ async Task InvokeAgentAsync(Agent agent, string? input = null)
{
if (!string.IsNullOrWhiteSpace(input))
{
- chat.AddChatMessage(new(AuthorRole.User, input));
- Console.WriteLine($"\n# {AuthorRole.User}: '{input}'");
+ ChatMessageContent message = new(AuthorRole.User, input);
+ chat.AddChatMessage(message);
+ this.WriteAgentChatMessage(message);
}
- await foreach (ChatMessageContent message in chat.InvokeAsync(agent))
+ await foreach (ChatMessageContent response in chat.InvokeAsync(agent))
{
- if (!string.IsNullOrWhiteSpace(message.Content))
- {
- Console.WriteLine($"\n# {message.Role} - {message.AuthorName ?? "*"}: '{message.Content}'");
- }
+ this.WriteAgentChatMessage(response);
}
}
}
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs
index ef5ba80154fa..cd81f7c4d187 100644
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs
@@ -3,6 +3,7 @@
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Files;
namespace Agents;
@@ -10,30 +11,29 @@ namespace Agents;
/// Demonstrate using code-interpreter with to
/// produce image content displays the requested charts.
///
-public class OpenAIAssistant_ChartMaker(ITestOutputHelper output) : BaseTest(output)
+public class OpenAIAssistant_ChartMaker(ITestOutputHelper output) : BaseAgentsTest(output)
{
- ///
- /// Target Open AI services.
- ///
- protected override bool ForceOpenAI => true;
-
private const string AgentName = "ChartMaker";
private const string AgentInstructions = "Create charts as requested without explanation.";
[Fact]
public async Task GenerateChartWithOpenAIAssistantAgentAsync()
{
+ OpenAIClientProvider provider = this.GetClientProvider();
+
+ FileClient fileClient = provider.Client.GetFileClient();
+
// Define the agent
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.CreateAsync(
kernel: new(),
- config: new(this.ApiKey, this.Endpoint),
- new()
+ provider,
+ new(this.Model)
{
Instructions = AgentInstructions,
Name = AgentName,
EnableCodeInterpreter = true,
- ModelId = this.Model,
+ Metadata = AssistantSampleMetadata,
});
// Create a chat for agent interaction.
@@ -55,6 +55,7 @@ Sum 426 1622 856 2904
""");
await InvokeAgentAsync("Can you regenerate this same chart using the category names as the bar colors?");
+ await InvokeAgentAsync("Perfect, can you regenerate this as a line chart?");
}
finally
{
@@ -64,21 +65,14 @@ Sum 426 1622 856 2904
// Local function to invoke agent and display the conversation messages.
async Task InvokeAgentAsync(string input)
{
- chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
-
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
+ ChatMessageContent message = new(AuthorRole.User, input);
+ chat.AddChatMessage(new(AuthorRole.User, input));
+ this.WriteAgentChatMessage(message);
- await foreach (ChatMessageContent message in chat.InvokeAsync(agent))
+ await foreach (ChatMessageContent response in chat.InvokeAsync(agent))
{
- if (!string.IsNullOrWhiteSpace(message.Content))
- {
- Console.WriteLine($"# {message.Role} - {message.AuthorName ?? "*"}: '{message.Content}'");
- }
-
- foreach (FileReferenceContent fileReference in message.Items.OfType())
- {
- Console.WriteLine($"# {message.Role} - {message.AuthorName ?? "*"}: @{fileReference.FileId}");
- }
+ this.WriteAgentChatMessage(response);
+ await this.DownloadResponseImageAsync(fileClient, response);
}
}
}
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs
index 8e64006ee9d3..dc4af2ad2743 100644
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs
@@ -1,10 +1,9 @@
// Copyright (c) Microsoft. All rights reserved.
-using System.Text;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
+using OpenAI.Files;
using Resources;
namespace Agents;
@@ -12,35 +11,31 @@ namespace Agents;
///
/// Demonstrate using code-interpreter to manipulate and generate csv files with .
///
-public class OpenAIAssistant_FileManipulation(ITestOutputHelper output) : BaseTest(output)
+public class OpenAIAssistant_FileManipulation(ITestOutputHelper output) : BaseAgentsTest(output)
{
- ///
- /// Target OpenAI services.
- ///
- protected override bool ForceOpenAI => true;
-
[Fact]
public async Task AnalyzeCSVFileUsingOpenAIAssistantAgentAsync()
{
- OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey);
+ OpenAIClientProvider provider = this.GetClientProvider();
- OpenAIFileReference uploadFile =
- await fileService.UploadContentAsync(
- new BinaryContent(await EmbeddedResource.ReadAllAsync("sales.csv"), mimeType: "text/plain"),
- new OpenAIFileUploadExecutionSettings("sales.csv", OpenAIFilePurpose.Assistants));
+ FileClient fileClient = provider.Client.GetFileClient();
- Console.WriteLine(this.ApiKey);
+ OpenAIFileInfo uploadFile =
+ await fileClient.UploadFileAsync(
+ new BinaryData(await EmbeddedResource.ReadAllAsync("sales.csv")!),
+ "sales.csv",
+ FileUploadPurpose.Assistants);
// Define the agent
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.CreateAsync(
kernel: new(),
- config: new(this.ApiKey, this.Endpoint),
- new()
+ provider,
+ new(this.Model)
{
- EnableCodeInterpreter = true, // Enable code-interpreter
- ModelId = this.Model,
- FileIds = [uploadFile.Id] // Associate uploaded file
+ EnableCodeInterpreter = true,
+ CodeInterpreterFileIds = [uploadFile.Id],
+ Metadata = AssistantSampleMetadata,
});
// Create a chat for agent interaction.
@@ -56,27 +51,20 @@ await OpenAIAssistantAgent.CreateAsync(
finally
{
await agent.DeleteAsync();
- await fileService.DeleteFileAsync(uploadFile.Id);
+ await fileClient.DeleteFileAsync(uploadFile.Id);
}
// Local function to invoke agent and display the conversation messages.
async Task InvokeAgentAsync(string input)
{
- chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
-
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
+ ChatMessageContent message = new(AuthorRole.User, input);
+ chat.AddChatMessage(new(AuthorRole.User, input));
+ this.WriteAgentChatMessage(message);
- await foreach (ChatMessageContent content in chat.InvokeAsync(agent))
+ await foreach (ChatMessageContent response in chat.InvokeAsync(agent))
{
- Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
-
- foreach (AnnotationContent annotation in content.Items.OfType())
- {
- Console.WriteLine($"\n* '{annotation.Quote}' => {annotation.FileId}");
- BinaryContent fileContent = await fileService.GetFileContentAsync(annotation.FileId!);
- byte[] byteContent = fileContent.Data?.ToArray() ?? [];
- Console.WriteLine(Encoding.Default.GetString(byteContent));
- }
+ this.WriteAgentChatMessage(response);
+ await this.DownloadResponseContentAsync(fileClient, response);
}
}
}
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs
index 7537f53da726..a8f31622c753 100644
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs
@@ -18,6 +18,7 @@ public class OpenAIAssistant_FileService(ITestOutputHelper output) : BaseTest(ou
[Fact]
public async Task UploadAndRetrieveFilesAsync()
{
+#pragma warning disable CS0618 // Type or member is obsolete
OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey);
BinaryContent[] files = [
@@ -27,7 +28,7 @@ public async Task UploadAndRetrieveFilesAsync()
new BinaryContent(data: await EmbeddedResource.ReadAllAsync("travelinfo.txt"), mimeType: "text/plain") { InnerContent = "travelinfo.txt" }
];
- var fileContents = new Dictionary();
+ Dictionary fileContents = new();
foreach (BinaryContent file in files)
{
OpenAIFileReference result = await fileService.UploadContentAsync(file, new(file.InnerContent!.ToString()!, OpenAIFilePurpose.FineTune));
@@ -48,7 +49,7 @@ public async Task UploadAndRetrieveFilesAsync()
string? fileName = fileContents[fileReference.Id].InnerContent!.ToString();
ReadOnlyMemory data = content.Data ?? new();
- var typedContent = mimeType switch
+ BinaryContent typedContent = mimeType switch
{
"image/jpeg" => new ImageContent(data, mimeType) { Uri = content.Uri, InnerContent = fileName, Metadata = content.Metadata },
"audio/wav" => new AudioContent(data, mimeType) { Uri = content.Uri, InnerContent = fileName, Metadata = content.Metadata },
@@ -62,5 +63,7 @@ public async Task UploadAndRetrieveFilesAsync()
// Delete the test file remotely
await fileService.DeleteFileAsync(fileReference.Id);
}
+
+#pragma warning restore CS0618 // Type or member is obsolete
}
}
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs
deleted file mode 100644
index 6f30b6974ff7..000000000000
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs
+++ /dev/null
@@ -1,70 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents;
-using Microsoft.SemanticKernel.Agents.OpenAI;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
-using Resources;
-
-namespace Agents;
-
-///
-/// Demonstrate using retrieval on .
-///
-public class OpenAIAssistant_Retrieval(ITestOutputHelper output) : BaseTest(output)
-{
- ///
- /// Retrieval tool not supported on Azure OpenAI.
- ///
- protected override bool ForceOpenAI => true;
-
- [Fact]
- public async Task UseRetrievalToolWithOpenAIAssistantAgentAsync()
- {
- OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey);
-
- OpenAIFileReference uploadFile =
- await fileService.UploadContentAsync(new BinaryContent(await EmbeddedResource.ReadAllAsync("travelinfo.txt")!, "text/plain"),
- new OpenAIFileUploadExecutionSettings("travelinfo.txt", OpenAIFilePurpose.Assistants));
-
- // Define the agent
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
- config: new(this.ApiKey, this.Endpoint),
- new()
- {
- EnableRetrieval = true, // Enable retrieval
- ModelId = this.Model,
- FileIds = [uploadFile.Id] // Associate uploaded file
- });
-
- // Create a chat for agent interaction.
- AgentGroupChat chat = new();
-
- // Respond to user input
- try
- {
- await InvokeAgentAsync("Where did sam go?");
- await InvokeAgentAsync("When does the flight leave Seattle?");
- await InvokeAgentAsync("What is the hotel contact info at the destination?");
- }
- finally
- {
- await agent.DeleteAsync();
- }
-
- // Local function to invoke agent and display the conversation messages.
- async Task InvokeAgentAsync(string input)
- {
- chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
-
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
-
- await foreach (ChatMessageContent content in chat.InvokeAsync(agent))
- {
- Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
- }
- }
- }
-}
diff --git a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs
index dcfdf7b511f0..39ce395b27b7 100644
--- a/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/AzureOpenAIWithData_ChatCompletion.cs
@@ -1,9 +1,9 @@
// Copyright (c) Microsoft. All rights reserved.
-using Azure.AI.OpenAI;
+using Azure.AI.OpenAI.Chat;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using xRetry;
namespace ChatCompletion;
@@ -47,8 +47,8 @@ public async Task ExampleWithChatCompletionAsync()
chatHistory.AddUserMessage(ask);
// Chat Completion example
- var chatExtensionsOptions = GetAzureChatExtensionsOptions();
- var promptExecutionSettings = new OpenAIPromptExecutionSettings { AzureChatExtensionsOptions = chatExtensionsOptions };
+ var dataSource = GetAzureSearchDataSource();
+ var promptExecutionSettings = new AzureOpenAIPromptExecutionSettings { AzureChatDataSource = dataSource };
var chatCompletion = kernel.GetRequiredService();
@@ -98,8 +98,8 @@ public async Task ExampleWithKernelAsync()
var function = kernel.CreateFunctionFromPrompt("Question: {{$input}}");
- var chatExtensionsOptions = GetAzureChatExtensionsOptions();
- var promptExecutionSettings = new OpenAIPromptExecutionSettings { AzureChatExtensionsOptions = chatExtensionsOptions };
+ var dataSource = GetAzureSearchDataSource();
+ var promptExecutionSettings = new AzureOpenAIPromptExecutionSettings { AzureChatDataSource = dataSource };
// First question without previous context based on uploaded content.
var response = await kernel.InvokeAsync(function, new(promptExecutionSettings) { ["input"] = ask });
@@ -125,20 +125,15 @@ public async Task ExampleWithKernelAsync()
}
///
- /// Initializes a new instance of the class.
+ /// Initializes a new instance of the class.
///
- private static AzureChatExtensionsOptions GetAzureChatExtensionsOptions()
+ private static AzureSearchChatDataSource GetAzureSearchDataSource()
{
- var azureSearchExtensionConfiguration = new AzureSearchChatExtensionConfiguration
+ return new AzureSearchChatDataSource
{
- SearchEndpoint = new Uri(TestConfiguration.AzureAISearch.Endpoint),
- Authentication = new OnYourDataApiKeyAuthenticationOptions(TestConfiguration.AzureAISearch.ApiKey),
+ Endpoint = new Uri(TestConfiguration.AzureAISearch.Endpoint),
+ Authentication = DataSourceAuthentication.FromApiKey(TestConfiguration.AzureAISearch.ApiKey),
IndexName = TestConfiguration.AzureAISearch.IndexName
};
-
- return new AzureChatExtensionsOptions
- {
- Extensions = { azureSearchExtensionConfiguration }
- };
}
}
diff --git a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryAuthorName.cs b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryAuthorName.cs
index 05346974da2f..2d08c507aa4c 100644
--- a/dotnet/samples/Concepts/ChatCompletion/ChatHistoryAuthorName.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/ChatHistoryAuthorName.cs
@@ -2,6 +2,7 @@
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using Microsoft.SemanticKernel.Connectors.OpenAI;
namespace ChatCompletion;
diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs
index 46aadfc243b0..42164d3fe8dc 100644
--- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs
@@ -2,6 +2,7 @@
using Azure.Identity;
using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using Microsoft.SemanticKernel.Connectors.OpenAI;
namespace ChatCompletion;
diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionMultipleChoices.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionMultipleChoices.cs
deleted file mode 100644
index 9534cac09a63..000000000000
--- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionMultipleChoices.cs
+++ /dev/null
@@ -1,133 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
-
-namespace ChatCompletion;
-
-///
-/// The following example shows how to use Semantic Kernel with multiple chat completion results.
-///
-public class OpenAI_ChatCompletionMultipleChoices(ITestOutputHelper output) : BaseTest(output)
-{
- ///
- /// Example with multiple chat completion results using .
- ///
- [Fact]
- public async Task MultipleChatCompletionResultsUsingKernelAsync()
- {
- var kernel = Kernel
- .CreateBuilder()
- .AddOpenAIChatCompletion(
- modelId: TestConfiguration.OpenAI.ChatModelId,
- apiKey: TestConfiguration.OpenAI.ApiKey)
- .Build();
-
- // Execution settings with configured ResultsPerPrompt property.
- var executionSettings = new OpenAIPromptExecutionSettings { MaxTokens = 200, ResultsPerPrompt = 3 };
-
- var contents = await kernel.InvokePromptAsync>("Write a paragraph about why AI is awesome", new(executionSettings));
-
- foreach (var content in contents!)
- {
- Console.Write(content.ToString() ?? string.Empty);
- Console.WriteLine("\n-------------\n");
- }
- }
-
- ///
- /// Example with multiple chat completion results using .
- ///
- [Fact]
- public async Task MultipleChatCompletionResultsUsingChatCompletionServiceAsync()
- {
- var kernel = Kernel
- .CreateBuilder()
- .AddOpenAIChatCompletion(
- modelId: TestConfiguration.OpenAI.ChatModelId,
- apiKey: TestConfiguration.OpenAI.ApiKey)
- .Build();
-
- // Execution settings with configured ResultsPerPrompt property.
- var executionSettings = new OpenAIPromptExecutionSettings { MaxTokens = 200, ResultsPerPrompt = 3 };
-
- var chatHistory = new ChatHistory();
- chatHistory.AddUserMessage("Write a paragraph about why AI is awesome");
-
- var chatCompletionService = kernel.GetRequiredService();
-
- foreach (var chatMessageContent in await chatCompletionService.GetChatMessageContentsAsync(chatHistory, executionSettings))
- {
- Console.Write(chatMessageContent.Content ?? string.Empty);
- Console.WriteLine("\n-------------\n");
- }
- }
-
- ///
- /// This example shows how to handle multiple results in case if prompt template contains a call to another prompt function.
- /// is used for result selection.
- ///
- [Fact]
- public async Task MultipleChatCompletionResultsInPromptTemplateAsync()
- {
- var kernel = Kernel
- .CreateBuilder()
- .AddOpenAIChatCompletion(
- modelId: TestConfiguration.OpenAI.ChatModelId,
- apiKey: TestConfiguration.OpenAI.ApiKey)
- .Build();
-
- var executionSettings = new OpenAIPromptExecutionSettings { MaxTokens = 200, ResultsPerPrompt = 3 };
-
- // Initializing a function with execution settings for multiple results.
- // We ask AI to write one paragraph, but in execution settings we specified that we want 3 different results for this request.
- var function = KernelFunctionFactory.CreateFromPrompt("Write a paragraph about why AI is awesome", executionSettings, "GetParagraph");
- var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]);
-
- kernel.Plugins.Add(plugin);
-
- // Add function result selection filter.
- kernel.FunctionInvocationFilters.Add(new FunctionResultSelectionFilter(this.Output));
-
- // Inside our main request, we call MyPlugin.GetParagraph function for text summarization.
- // Taking into account that MyPlugin.GetParagraph function produces 3 results, for text summarization we need to choose only one of them.
- // Registered filter will be invoked during execution, which will select and return only 1 result, and this result will be inserted in our main request for summarization.
- var result = await kernel.InvokePromptAsync("Summarize this text: {{MyPlugin.GetParagraph}}");
-
- // It's possible to check what prompt was rendered for our main request.
- Console.WriteLine($"Rendered prompt: '{result.RenderedPrompt}'");
-
- // Output:
- // Rendered prompt: 'Summarize this text: AI is awesome because...'
- }
-
- ///
- /// Example of filter which is responsible for result selection in case if some function produces multiple results.
- ///
- private sealed class FunctionResultSelectionFilter(ITestOutputHelper output) : IFunctionInvocationFilter
- {
- public async Task OnFunctionInvocationAsync(FunctionInvocationContext context, Func next)
- {
- await next(context);
-
- // Selection logic for function which is expected to produce multiple results.
- if (context.Function.Name == "GetParagraph")
- {
- // Get multiple results from function invocation
- var contents = context.Result.GetValue>()!;
-
- output.WriteLine("Multiple results:");
-
- foreach (var content in contents)
- {
- output.WriteLine(content.ToString());
- }
-
- // Select first result for correct prompt rendering
- var selectedContent = contents[0];
- context.Result = new FunctionResult(context.Function, selectedContent, context.Kernel.Culture, selectedContent.Metadata);
- }
- }
- }
-}
diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs
index 4836dcf03d9f..bd1285e29af3 100644
--- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs
@@ -2,6 +2,7 @@
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using Microsoft.SemanticKernel.Connectors.OpenAI;
namespace ChatCompletion;
diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreamingMultipleChoices.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreamingMultipleChoices.cs
deleted file mode 100644
index 6a23a43ae9f8..000000000000
--- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreamingMultipleChoices.cs
+++ /dev/null
@@ -1,114 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
-
-namespace ChatCompletion;
-
-// The following example shows how to use Semantic Kernel with multiple streaming chat completion results.
-public class OpenAI_ChatCompletionStreamingMultipleChoices(ITestOutputHelper output) : BaseTest(output)
-{
- [Fact]
- public Task AzureOpenAIMultiStreamingChatCompletionAsync()
- {
- Console.WriteLine("======== Azure OpenAI - Multiple Chat Completions - Raw Streaming ========");
-
- AzureOpenAIChatCompletionService chatCompletionService = new(
- deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
- endpoint: TestConfiguration.AzureOpenAI.Endpoint,
- apiKey: TestConfiguration.AzureOpenAI.ApiKey,
- modelId: TestConfiguration.AzureOpenAI.ChatModelId);
-
- return StreamingChatCompletionAsync(chatCompletionService, 3);
- }
-
- [Fact]
- public Task OpenAIMultiStreamingChatCompletionAsync()
- {
- Console.WriteLine("======== OpenAI - Multiple Chat Completions - Raw Streaming ========");
-
- OpenAIChatCompletionService chatCompletionService = new(
- modelId: TestConfiguration.OpenAI.ChatModelId,
- apiKey: TestConfiguration.OpenAI.ApiKey);
-
- return StreamingChatCompletionAsync(chatCompletionService, 3);
- }
-
- ///
- /// Streams the results of a chat completion request to the console.
- ///
- /// Chat completion service to use
- /// Number of results to get for each chat completion request
- private async Task StreamingChatCompletionAsync(IChatCompletionService chatCompletionService,
- int numResultsPerPrompt)
- {
- var executionSettings = new OpenAIPromptExecutionSettings()
- {
- MaxTokens = 200,
- FrequencyPenalty = 0,
- PresencePenalty = 0,
- Temperature = 1,
- TopP = 0.5,
- ResultsPerPrompt = numResultsPerPrompt
- };
-
- var consoleLinesPerResult = 10;
-
- // Uncomment this if you want to use a console app to display the results
- // ClearDisplayByAddingEmptyLines();
-
- var prompt = "Hi, I'm looking for 5 random title names for sci-fi books";
-
- await ProcessStreamAsyncEnumerableAsync(chatCompletionService, prompt, executionSettings, consoleLinesPerResult);
-
- Console.WriteLine();
-
- // Set cursor position to after displayed results
- // Console.SetCursorPosition(0, executionSettings.ResultsPerPrompt * consoleLinesPerResult);
-
- Console.WriteLine();
- }
-
- ///
- /// Does the actual streaming and display of the chat completion.
- ///
- private async Task ProcessStreamAsyncEnumerableAsync(IChatCompletionService chatCompletionService, string prompt,
- OpenAIPromptExecutionSettings executionSettings, int consoleLinesPerResult)
- {
- var messagesPerChoice = new Dictionary();
- var chatHistory = new ChatHistory(prompt);
-
- // For each chat completion update
- await foreach (StreamingChatMessageContent chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, executionSettings))
- {
- // Set cursor position to the beginning of where this choice (i.e. this result of
- // a single multi-result request) is to be displayed.
- // Console.SetCursorPosition(0, chatUpdate.ChoiceIndex * consoleLinesPerResult + 1);
-
- // The first time around, start choice text with role information
- if (!messagesPerChoice.ContainsKey(chatUpdate.ChoiceIndex))
- {
- messagesPerChoice[chatUpdate.ChoiceIndex] = $"Role: {chatUpdate.Role ?? new AuthorRole()}\n";
- Console.Write($"Choice index: {chatUpdate.ChoiceIndex}, Role: {chatUpdate.Role ?? new AuthorRole()}");
- }
-
- // Add latest completion bit, if any
- if (chatUpdate.Content is { Length: > 0 })
- {
- messagesPerChoice[chatUpdate.ChoiceIndex] += chatUpdate.Content;
- }
-
- // Overwrite what is currently in the console area for the updated choice
- // Console.Write(messagesPerChoice[chatUpdate.ChoiceIndex]);
- Console.Write($"Choice index: {chatUpdate.ChoiceIndex}, Content: {chatUpdate.Content}");
- }
-
- // Display the aggregated results
- foreach (string message in messagesPerChoice.Values)
- {
- Console.WriteLine("-------------------");
- Console.WriteLine(message);
- }
- }
-}
diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomAzureOpenAIClient.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomAzureOpenAIClient.cs
index 9e63e4b46975..64228f692799 100644
--- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomAzureOpenAIClient.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_CustomAzureOpenAIClient.cs
@@ -1,8 +1,8 @@
// Copyright (c) Microsoft. All rights reserved.
+using System.ClientModel.Primitives;
using Azure;
using Azure.AI.OpenAI;
-using Azure.Core.Pipeline;
using Microsoft.SemanticKernel;
namespace ChatCompletion;
@@ -28,12 +28,12 @@ public async Task RunAsync()
var httpClient = new HttpClient();
httpClient.DefaultRequestHeaders.Add("My-Custom-Header", "My Custom Value");
- // Configure OpenAIClient to use the customized HttpClient
- var clientOptions = new OpenAIClientOptions
+ // Configure AzureOpenAIClient to use the customized HttpClient
+ var clientOptions = new AzureOpenAIClientOptions
{
- Transport = new HttpClientTransport(httpClient),
+ Transport = new HttpClientPipelineTransport(httpClient),
};
- var openAIClient = new OpenAIClient(new Uri(endpoint), new AzureKeyCredential(apiKey), clientOptions);
+ var openAIClient = new AzureOpenAIClient(new Uri(endpoint), new AzureKeyCredential(apiKey), clientOptions);
IKernelBuilder builder = Kernel.CreateBuilder();
builder.AddAzureOpenAIChatCompletion(deploymentName, openAIClient);
diff --git a/dotnet/samples/Concepts/Concepts.csproj b/dotnet/samples/Concepts/Concepts.csproj
index 89cc2c897d61..aa303046bd36 100644
--- a/dotnet/samples/Concepts/Concepts.csproj
+++ b/dotnet/samples/Concepts/Concepts.csproj
@@ -8,7 +8,7 @@
false
true
- $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110
+ $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110,OPENAI001
Library
5ee045b0-aea3-4f08-8d31-32d1a6f8fed0
@@ -41,10 +41,14 @@
-
+
+
+ true
+
+
@@ -100,13 +104,16 @@
-
+
Always
-
+
-
+
Always
-
+
+
+ Always
+
diff --git a/dotnet/samples/Concepts/DependencyInjection/Kernel_Injecting.cs b/dotnet/samples/Concepts/DependencyInjection/Kernel_Injecting.cs
index 4c6e38452fc6..21abae070cf0 100644
--- a/dotnet/samples/Concepts/DependencyInjection/Kernel_Injecting.cs
+++ b/dotnet/samples/Concepts/DependencyInjection/Kernel_Injecting.cs
@@ -14,7 +14,7 @@ public async Task RunAsync()
{
ServiceCollection collection = new();
collection.AddLogging(c => c.AddConsole().SetMinimumLevel(LogLevel.Information));
- collection.AddOpenAITextGeneration(TestConfiguration.OpenAI.ModelId, TestConfiguration.OpenAI.ApiKey);
+ collection.AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey);
collection.AddSingleton();
// Registering class that uses Kernel to execute a plugin
diff --git a/dotnet/samples/Concepts/Functions/FunctionResult_StronglyTyped.cs b/dotnet/samples/Concepts/Functions/FunctionResult_StronglyTyped.cs
index 0b50562583ea..79826de22bec 100644
--- a/dotnet/samples/Concepts/Functions/FunctionResult_StronglyTyped.cs
+++ b/dotnet/samples/Concepts/Functions/FunctionResult_StronglyTyped.cs
@@ -2,8 +2,8 @@
using System.Diagnostics;
using System.Text.Json;
-using Azure.AI.OpenAI;
using Microsoft.SemanticKernel;
+using OpenAI.Chat;
namespace Functions;
@@ -79,11 +79,11 @@ public FunctionResultTestDataGen(FunctionResult functionResult, long executionTi
private TokenCounts? ParseTokenCounts()
{
- CompletionsUsage? usage = FunctionResult.Metadata?["Usage"] as CompletionsUsage;
+ var usage = FunctionResult.Metadata?["Usage"] as ChatTokenUsage;
return new TokenCounts(
- completionTokens: usage?.CompletionTokens ?? 0,
- promptTokens: usage?.PromptTokens ?? 0,
+ completionTokens: usage?.OutputTokens ?? 0,
+ promptTokens: usage?.InputTokens ?? 0,
totalTokens: usage?.TotalTokens ?? 0);
}
diff --git a/dotnet/samples/Concepts/Memory/TextChunkingAndEmbedding.cs b/dotnet/samples/Concepts/Memory/TextChunkingAndEmbedding.cs
index 04a74656e948..fb96579f32a1 100644
--- a/dotnet/samples/Concepts/Memory/TextChunkingAndEmbedding.cs
+++ b/dotnet/samples/Concepts/Memory/TextChunkingAndEmbedding.cs
@@ -1,7 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
using Microsoft.ML.Tokenizers;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using Microsoft.SemanticKernel.Text;
namespace Memory;
diff --git a/dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs b/dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs
index fbc313adebf4..883195b68df9 100644
--- a/dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs
+++ b/dotnet/samples/Concepts/Memory/TextMemoryPlugin_RecallJsonSerializationWithOptions.cs
@@ -4,7 +4,7 @@
using System.Text.Json;
using System.Text.Unicode;
using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using Microsoft.SemanticKernel.Memory;
using Microsoft.SemanticKernel.Plugins.Memory;
diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs
index db8e259f4e7a..cbfc5c1b0b24 100644
--- a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs
+++ b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_CustomMapper.cs
@@ -3,7 +3,7 @@
using System.Text.Json;
using System.Text.Json.Nodes;
using Memory.VectorStoreFixtures;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using Microsoft.SemanticKernel.Connectors.Redis;
using Microsoft.SemanticKernel.Data;
using Microsoft.SemanticKernel.Embeddings;
diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_MultiStore.cs b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_MultiStore.cs
index 18f0e5b476ca..6aa4d84cebab 100644
--- a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_MultiStore.cs
+++ b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_MultiStore.cs
@@ -4,7 +4,7 @@
using Memory.VectorStoreFixtures;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using Microsoft.SemanticKernel.Connectors.Qdrant;
using Microsoft.SemanticKernel.Connectors.Redis;
using Microsoft.SemanticKernel.Data;
diff --git a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_Simple.cs b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_Simple.cs
index 341e5c2bbda2..75013b8196ac 100644
--- a/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_Simple.cs
+++ b/dotnet/samples/Concepts/Memory/VectorStore_DataIngestion_Simple.cs
@@ -2,7 +2,7 @@
using System.Text.Json;
using Memory.VectorStoreFixtures;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using Microsoft.SemanticKernel.Connectors.Qdrant;
using Microsoft.SemanticKernel.Data;
using Microsoft.SemanticKernel.Embeddings;
diff --git a/dotnet/samples/Concepts/Planners/AutoFunctionCallingPlanning.cs b/dotnet/samples/Concepts/Planners/AutoFunctionCallingPlanning.cs
index 4c287a63a216..38e3e53a0e74 100644
--- a/dotnet/samples/Concepts/Planners/AutoFunctionCallingPlanning.cs
+++ b/dotnet/samples/Concepts/Planners/AutoFunctionCallingPlanning.cs
@@ -7,13 +7,13 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
-using Azure.AI.OpenAI;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel.Planning;
+using OpenAI.Chat;
namespace Planners;
@@ -328,7 +328,7 @@ private int GetChatHistoryTokens(ChatHistory? chatHistory)
{
if (message.Metadata is not null &&
message.Metadata.TryGetValue("Usage", out object? usage) &&
- usage is CompletionsUsage completionsUsage &&
+ usage is ChatTokenUsage completionsUsage &&
completionsUsage is not null)
{
tokens += completionsUsage.TotalTokens;
diff --git a/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs b/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs
index 7111e873cf4c..c383ea9025f1 100644
--- a/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs
+++ b/dotnet/samples/Concepts/Resources/Plugins/LegacyMenuPlugin.cs
@@ -7,12 +7,6 @@ namespace Plugins;
public sealed class LegacyMenuPlugin
{
- public const string CorrelationIdArgument = "correlationId";
-
- private readonly List _correlationIds = [];
-
- public IReadOnlyList CorrelationIds => this._correlationIds;
-
///
/// Returns a mock item menu.
///
@@ -20,8 +14,6 @@ public sealed class LegacyMenuPlugin
[System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")]
public string[] GetSpecials(KernelArguments? arguments)
{
- CaptureCorrelationId(arguments, nameof(GetSpecials));
-
return
[
"Special Soup: Clam Chowder",
@@ -39,8 +31,6 @@ public string GetItemPrice(
string menuItem,
KernelArguments? arguments)
{
- CaptureCorrelationId(arguments, nameof(GetItemPrice));
-
return "$9.99";
}
@@ -55,21 +45,6 @@ public bool IsItem86d(
int count,
KernelArguments? arguments)
{
- CaptureCorrelationId(arguments, nameof(IsItem86d));
-
return count < 3;
}
-
- private void CaptureCorrelationId(KernelArguments? arguments, string scope)
- {
- if (arguments?.TryGetValue(CorrelationIdArgument, out object? correlationId) ?? false)
- {
- string? correlationText = correlationId?.ToString();
-
- if (!string.IsNullOrWhiteSpace(correlationText))
- {
- this._correlationIds.Add($"{scope}:{correlationText}");
- }
- }
- }
}
diff --git a/dotnet/samples/Concepts/Resources/Plugins/MenuPlugin.cs b/dotnet/samples/Concepts/Resources/Plugins/MenuPlugin.cs
deleted file mode 100644
index be82177eda5d..000000000000
--- a/dotnet/samples/Concepts/Resources/Plugins/MenuPlugin.cs
+++ /dev/null
@@ -1,34 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.ComponentModel;
-using Microsoft.SemanticKernel;
-
-namespace Plugins;
-
-public sealed class MenuPlugin
-{
- public const string CorrelationIdArgument = "correlationId";
-
- private readonly List _correlationIds = [];
-
- public IReadOnlyList CorrelationIds => this._correlationIds;
-
- [KernelFunction, Description("Provides a list of specials from the menu.")]
- [System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")]
- public string GetSpecials()
- {
- return @"
-Special Soup: Clam Chowder
-Special Salad: Cobb Salad
-Special Drink: Chai Tea
-";
- }
-
- [KernelFunction, Description("Provides the price of the requested menu item.")]
- public string GetItemPrice(
- [Description("The name of the menu item.")]
- string menuItem)
- {
- return "$9.99";
- }
-}
diff --git a/dotnet/samples/Concepts/TextGeneration/OpenAI_TextGenerationStreaming.cs b/dotnet/samples/Concepts/TextGeneration/OpenAI_TextGenerationStreaming.cs
index 44b7806a1355..bb906bb6d05c 100644
--- a/dotnet/samples/Concepts/TextGeneration/OpenAI_TextGenerationStreaming.cs
+++ b/dotnet/samples/Concepts/TextGeneration/OpenAI_TextGenerationStreaming.cs
@@ -1,5 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Microsoft.SemanticKernel.TextGeneration;
@@ -22,11 +23,11 @@ public Task AzureOpenAITextGenerationStreamAsync()
{
Console.WriteLine("======== Azure OpenAI - Text Generation - Raw Streaming ========");
- var textGeneration = new AzureOpenAITextGenerationService(
- deploymentName: TestConfiguration.AzureOpenAI.DeploymentName,
+ var textGeneration = new AzureOpenAIChatCompletionService(
+ deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
endpoint: TestConfiguration.AzureOpenAI.Endpoint,
apiKey: TestConfiguration.AzureOpenAI.ApiKey,
- modelId: TestConfiguration.AzureOpenAI.ModelId);
+ modelId: TestConfiguration.AzureOpenAI.ChatModelId);
return this.TextGenerationStreamAsync(textGeneration);
}
@@ -36,7 +37,7 @@ public Task OpenAITextGenerationStreamAsync()
{
Console.WriteLine("======== Open AI - Text Generation - Raw Streaming ========");
- var textGeneration = new OpenAITextGenerationService("gpt-3.5-turbo-instruct", TestConfiguration.OpenAI.ApiKey);
+ var textGeneration = new OpenAIChatCompletionService(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey);
return this.TextGenerationStreamAsync(textGeneration);
}
diff --git a/dotnet/samples/Demos/BookingRestaurant/BookingRestaurant.csproj b/dotnet/samples/Demos/BookingRestaurant/BookingRestaurant.csproj
index 2f744127417e..678819305a93 100644
--- a/dotnet/samples/Demos/BookingRestaurant/BookingRestaurant.csproj
+++ b/dotnet/samples/Demos/BookingRestaurant/BookingRestaurant.csproj
@@ -22,7 +22,7 @@
-
+
diff --git a/dotnet/samples/Demos/CreateChatGptPlugin/MathPlugin/azure-function/sk-chatgpt-azure-function.csproj b/dotnet/samples/Demos/CreateChatGptPlugin/MathPlugin/azure-function/sk-chatgpt-azure-function.csproj
index 3c6ca9a15470..805e10f7d5ac 100644
--- a/dotnet/samples/Demos/CreateChatGptPlugin/MathPlugin/azure-function/sk-chatgpt-azure-function.csproj
+++ b/dotnet/samples/Demos/CreateChatGptPlugin/MathPlugin/azure-function/sk-chatgpt-azure-function.csproj
@@ -28,7 +28,7 @@
-
+
diff --git a/dotnet/samples/Demos/CreateChatGptPlugin/README.md b/dotnet/samples/Demos/CreateChatGptPlugin/README.md
index 3394ad2b1693..e9e035272d3d 100644
--- a/dotnet/samples/Demos/CreateChatGptPlugin/README.md
+++ b/dotnet/samples/Demos/CreateChatGptPlugin/README.md
@@ -16,17 +16,16 @@ The sample can be configured by using the command line with .NET [Secret Manager
This sample has been tested with the following models:
-| Service | Model type | Model | Model version | Supported |
-| ------------ | --------------- | ---------------- | ------------: | --------- |
-| OpenAI | Text Completion | text-davinci-003 | 1 | ❌ |
-| OpenAI | Chat Completion | gpt-3.5-turbo | 1 | ❌ |
-| OpenAI | Chat Completion | gpt-3.5-turbo | 0301 | ❌ |
-| Azure OpenAI | Chat Completion | gpt-3.5-turbo | 0613 | ✅ |
-| Azure OpenAI | Chat Completion | gpt-3.5-turbo | 1106 | ✅ |
-| OpenAI | Chat Completion | gpt-4 | 1 | ❌ |
-| OpenAI | Chat Completion | gpt-4 | 0314 | ❌ |
-| Azure OpenAI | Chat Completion | gpt-4 | 0613 | ✅ |
-| Azure OpenAI | Chat Completion | gpt-4 | 1106 | ✅ |
+| Service | Model | Model version | Supported |
+| ------------ | ---------------- | ------------: | --------- |
+| OpenAI | gpt-3.5-turbo | 1 | ❌ |
+| OpenAI | gpt-3.5-turbo | 0301 | ❌ |
+| Azure OpenAI | gpt-3.5-turbo | 0613 | ✅ |
+| Azure OpenAI | gpt-3.5-turbo | 1106 | ✅ |
+| OpenAI | gpt-4 | 1 | ❌ |
+| OpenAI | gpt-4 | 0314 | ❌ |
+| Azure OpenAI | gpt-4 | 0613 | ✅ |
+| Azure OpenAI | gpt-4 | 1106 | ✅ |
This sample uses function calling, so it only works on models newer than 0613.
@@ -39,7 +38,6 @@ cd 14-Create-ChatGPT-Plugin/Solution
dotnet user-secrets set "Global:LlmService" "OpenAI"
-dotnet user-secrets set "OpenAI:ModelType" "chat-completion"
dotnet user-secrets set "OpenAI:ChatCompletionModelId" "gpt-4"
dotnet user-secrets set "OpenAI:ApiKey" "... your OpenAI key ..."
dotnet user-secrets set "OpenAI:OrgId" "... your ord ID ..."
@@ -52,7 +50,6 @@ cd 14-Create-ChatGPT-Plugin/Solution
dotnet user-secrets set "Global:LlmService" "AzureOpenAI"
-dotnet user-secrets set "AzureOpenAI:DeploymentType" "chat-completion"
dotnet user-secrets set "AzureOpenAI:ChatCompletionDeploymentName" "gpt-35-turbo"
dotnet user-secrets set "AzureOpenAI:ChatCompletionModelId" "gpt-3.5-turbo-0613"
dotnet user-secrets set "AzureOpenAI:Endpoint" "... your Azure OpenAI endpoint ..."
diff --git a/dotnet/samples/Demos/CreateChatGptPlugin/Solution/CreateChatGptPlugin.csproj b/dotnet/samples/Demos/CreateChatGptPlugin/Solution/CreateChatGptPlugin.csproj
index a81e39b415e4..a663838e564b 100644
--- a/dotnet/samples/Demos/CreateChatGptPlugin/Solution/CreateChatGptPlugin.csproj
+++ b/dotnet/samples/Demos/CreateChatGptPlugin/Solution/CreateChatGptPlugin.csproj
@@ -16,8 +16,8 @@
+
-
@@ -26,4 +26,8 @@
+
+
+
+
diff --git a/dotnet/samples/Demos/CreateChatGptPlugin/Solution/config/KernelBuilderExtensions.cs b/dotnet/samples/Demos/CreateChatGptPlugin/Solution/config/KernelBuilderExtensions.cs
index 3ba36e2bbdb8..a823ac316880 100644
--- a/dotnet/samples/Demos/CreateChatGptPlugin/Solution/config/KernelBuilderExtensions.cs
+++ b/dotnet/samples/Demos/CreateChatGptPlugin/Solution/config/KernelBuilderExtensions.cs
@@ -14,47 +14,24 @@ internal static IKernelBuilder WithCompletionService(this IKernelBuilder kernelB
switch (Env.Var("Global:LlmService")!)
{
case "AzureOpenAI":
- if (Env.Var("AzureOpenAI:DeploymentType") == "text-completion")
- {
- kernelBuilder.Services.AddAzureOpenAITextGeneration(
- deploymentName: Env.Var("AzureOpenAI:TextCompletionDeploymentName")!,
- modelId: Env.Var("AzureOpenAI:TextCompletionModelId"),
- endpoint: Env.Var("AzureOpenAI:Endpoint")!,
- apiKey: Env.Var("AzureOpenAI:ApiKey")!
- );
- }
- else if (Env.Var("AzureOpenAI:DeploymentType") == "chat-completion")
- {
- kernelBuilder.Services.AddAzureOpenAIChatCompletion(
- deploymentName: Env.Var("AzureOpenAI:ChatCompletionDeploymentName")!,
- modelId: Env.Var("AzureOpenAI:ChatCompletionModelId"),
- endpoint: Env.Var("AzureOpenAI:Endpoint")!,
- apiKey: Env.Var("AzureOpenAI:ApiKey")!
- );
- }
+ kernelBuilder.Services.AddAzureOpenAIChatCompletion(
+ deploymentName: Env.Var("AzureOpenAI:ChatCompletionDeploymentName")!,
+ modelId: Env.Var("AzureOpenAI:ChatCompletionModelId"),
+ endpoint: Env.Var("AzureOpenAI:Endpoint")!,
+ apiKey: Env.Var("AzureOpenAI:ApiKey")!
+ );
break;
case "OpenAI":
- if (Env.Var("OpenAI:ModelType") == "text-completion")
- {
- kernelBuilder.Services.AddOpenAITextGeneration(
- modelId: Env.Var("OpenAI:TextCompletionModelId")!,
- apiKey: Env.Var("OpenAI:ApiKey")!,
- orgId: Env.Var("OpenAI:OrgId")
- );
- }
- else if (Env.Var("OpenAI:ModelType") == "chat-completion")
- {
- kernelBuilder.Services.AddOpenAIChatCompletion(
- modelId: Env.Var("OpenAI:ChatCompletionModelId")!,
- apiKey: Env.Var("OpenAI:ApiKey")!,
- orgId: Env.Var("OpenAI:OrgId")
- );
- }
+ kernelBuilder.Services.AddOpenAIChatCompletion(
+ modelId: Env.Var("OpenAI:ChatCompletionModelId")!,
+ apiKey: Env.Var("OpenAI:ApiKey")!,
+ orgId: Env.Var("OpenAI:OrgId")
+ );
break;
default:
- throw new ArgumentException($"Invalid service type value: {Env.Var("OpenAI:ModelType")}");
+ throw new ArgumentException($"Invalid service type value: {Env.Var("Global:LlmService")}");
}
return kernelBuilder;
diff --git a/dotnet/samples/Demos/FunctionInvocationApproval/FunctionInvocationApproval.csproj b/dotnet/samples/Demos/FunctionInvocationApproval/FunctionInvocationApproval.csproj
index ead3b5036cb4..e39a7f5b795d 100644
--- a/dotnet/samples/Demos/FunctionInvocationApproval/FunctionInvocationApproval.csproj
+++ b/dotnet/samples/Demos/FunctionInvocationApproval/FunctionInvocationApproval.csproj
@@ -13,7 +13,7 @@
-
+
diff --git a/dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAI.cs b/dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAIOptions.cs
similarity index 91%
rename from dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAI.cs
rename to dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAIOptions.cs
index f4096b5e95d5..ef20853597cc 100644
--- a/dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAI.cs
+++ b/dotnet/samples/Demos/HomeAutomation/Options/AzureOpenAIOptions.cs
@@ -7,7 +7,7 @@ namespace HomeAutomation.Options;
///
/// Azure OpenAI settings.
///
-public sealed class AzureOpenAI
+public sealed class AzureOpenAIOptions
{
[Required]
public string ChatDeploymentName { get; set; } = string.Empty;
diff --git a/dotnet/samples/Demos/HomeAutomation/Program.cs b/dotnet/samples/Demos/HomeAutomation/Program.cs
index e55279405ceb..8f4882e3303f 100644
--- a/dotnet/samples/Demos/HomeAutomation/Program.cs
+++ b/dotnet/samples/Demos/HomeAutomation/Program.cs
@@ -32,24 +32,25 @@ internal static async Task Main(string[] args)
builder.Services.AddHostedService();
// Get configuration
- builder.Services.AddOptions()
- .Bind(builder.Configuration.GetSection(nameof(AzureOpenAI)))
+ builder.Services.AddOptions()
+ .Bind(builder.Configuration.GetSection(nameof(AzureOpenAIOptions)))
.ValidateDataAnnotations()
.ValidateOnStart();
// Chat completion service that kernels will use
builder.Services.AddSingleton(sp =>
{
- AzureOpenAI options = sp.GetRequiredService>().Value;
+ OpenAIOptions options = sp.GetRequiredService>().Value;
// A custom HttpClient can be provided to this constructor
- return new AzureOpenAIChatCompletionService(options.ChatDeploymentName, options.Endpoint, options.ApiKey);
+ return new OpenAIChatCompletionService(options.ChatModelId, options.ApiKey);
- /* Alternatively, you can use plain, non-Azure OpenAI after loading OpenAIOptions instead
- of AzureOpenAI options with builder.Services.AddOptions:
- OpenAI options = sp.GetRequiredService>().Value;
+ /* Alternatively, you can use Azure OpenAI after loading AzureOpenAIOptions instead
+ of OpenAI options with builder.Services.AddOptions:
- return new OpenAIChatCompletionService(options.ChatModelId, options.ApiKey);*/
+ AzureOpenAIOptions options = sp.GetRequiredService>().Value;
+
+ return new AzureOpenAIChatCompletionService(options.ChatDeploymentName, options.Endpoint, options.ApiKey); */
});
// Add plugins that can be used by kernels
diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/AutoFunctionCallingController.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/AutoFunctionCallingController.cs
index 8878bc0b57e5..37a390fee69a 100644
--- a/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/AutoFunctionCallingController.cs
+++ b/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/AutoFunctionCallingController.cs
@@ -1,5 +1,8 @@
// Copyright (c) Microsoft. All rights reserved.
+#pragma warning disable IDE0005 // Using directive is unnecessary
+
+using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
@@ -8,6 +11,8 @@
using StepwisePlannerMigration.Plugins;
using StepwisePlannerMigration.Services;
+#pragma warning restore IDE0005 // Using directive is unnecessary
+
namespace StepwisePlannerMigration.Controllers;
///
diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/StepwisePlannerController.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/StepwisePlannerController.cs
index f060268833ca..096ce4795fb3 100644
--- a/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/StepwisePlannerController.cs
+++ b/dotnet/samples/Demos/StepwisePlannerMigration/Controllers/StepwisePlannerController.cs
@@ -1,5 +1,8 @@
// Copyright (c) Microsoft. All rights reserved.
+#pragma warning disable IDE0005 // Using directive is unnecessary
+
+using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
@@ -8,6 +11,8 @@
using StepwisePlannerMigration.Plugins;
using StepwisePlannerMigration.Services;
+#pragma warning restore IDE0005 // Using directive is unnecessary
+
namespace StepwisePlannerMigration.Controllers;
///
diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Extensions/ConfigurationExtensions.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Extensions/ConfigurationExtensions.cs
index a7eca68c33c8..3407d79479ed 100644
--- a/dotnet/samples/Demos/StepwisePlannerMigration/Extensions/ConfigurationExtensions.cs
+++ b/dotnet/samples/Demos/StepwisePlannerMigration/Extensions/ConfigurationExtensions.cs
@@ -1,6 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
using System.ComponentModel.DataAnnotations;
+using Microsoft.Extensions.Configuration;
namespace StepwisePlannerMigration.Extensions;
diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/TimePlugin.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/TimePlugin.cs
index 7a1ce92d0a71..80b976702ed3 100644
--- a/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/TimePlugin.cs
+++ b/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/TimePlugin.cs
@@ -1,8 +1,13 @@
// Copyright (c) Microsoft. All rights reserved.
+#pragma warning disable IDE0005 // Using directive is unnecessary
+
+using System;
using System.ComponentModel;
using Microsoft.SemanticKernel;
+#pragma warning restore IDE0005 // Using directive is unnecessary
+
namespace StepwisePlannerMigration.Plugins;
///
diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/WeatherPlugin.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/WeatherPlugin.cs
index dfd72dd36c2c..52658a47e13e 100644
--- a/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/WeatherPlugin.cs
+++ b/dotnet/samples/Demos/StepwisePlannerMigration/Plugins/WeatherPlugin.cs
@@ -1,8 +1,12 @@
// Copyright (c) Microsoft. All rights reserved.
+#pragma warning disable IDE0005 // Using directive is unnecessary
+
using System.ComponentModel;
using Microsoft.SemanticKernel;
+#pragma warning restore IDE0005 // Using directive is unnecessary
+
namespace StepwisePlannerMigration.Plugins;
///
diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Program.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Program.cs
index 99b62fba30b7..cd9186d405b2 100644
--- a/dotnet/samples/Demos/StepwisePlannerMigration/Program.cs
+++ b/dotnet/samples/Demos/StepwisePlannerMigration/Program.cs
@@ -1,5 +1,10 @@
// Copyright (c) Microsoft. All rights reserved.
+using System.IO;
+using Microsoft.AspNetCore.Builder;
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Planning;
using StepwisePlannerMigration.Extensions;
diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Services/IPlanProvider.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Services/IPlanProvider.cs
index 4bdae07f6ed7..695a3a18e9c9 100644
--- a/dotnet/samples/Demos/StepwisePlannerMigration/Services/IPlanProvider.cs
+++ b/dotnet/samples/Demos/StepwisePlannerMigration/Services/IPlanProvider.cs
@@ -1,7 +1,11 @@
// Copyright (c) Microsoft. All rights reserved.
+#pragma warning disable IDE0005 // Using directive is unnecessary
+
using Microsoft.SemanticKernel.ChatCompletion;
+#pragma warning restore IDE0005 // Using directive is unnecessary
+
namespace StepwisePlannerMigration.Services;
///
diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/Services/PlanProvider.cs b/dotnet/samples/Demos/StepwisePlannerMigration/Services/PlanProvider.cs
index 13218eeec135..ed5bd4f03fe1 100644
--- a/dotnet/samples/Demos/StepwisePlannerMigration/Services/PlanProvider.cs
+++ b/dotnet/samples/Demos/StepwisePlannerMigration/Services/PlanProvider.cs
@@ -1,8 +1,14 @@
// Copyright (c) Microsoft. All rights reserved.
+using System.IO;
using System.Text.Json;
+
+#pragma warning disable IDE0005 // Using directive is unnecessary
+
using Microsoft.SemanticKernel.ChatCompletion;
+#pragma warning restore IDE0005 // Using directive is unnecessary
+
namespace StepwisePlannerMigration.Services;
///
diff --git a/dotnet/samples/Demos/StepwisePlannerMigration/StepwisePlannerMigration.csproj b/dotnet/samples/Demos/StepwisePlannerMigration/StepwisePlannerMigration.csproj
index 1475397e7eb2..abd289077625 100644
--- a/dotnet/samples/Demos/StepwisePlannerMigration/StepwisePlannerMigration.csproj
+++ b/dotnet/samples/Demos/StepwisePlannerMigration/StepwisePlannerMigration.csproj
@@ -3,7 +3,6 @@
net8.0
enable
- enable
$(NoWarn);VSTHRD111,CA2007,CS8618,CS1591,SKEXP0001, SKEXP0060
5ee045b0-aea3-4f08-8d31-32d1a6f8fed0
diff --git a/dotnet/samples/Demos/TelemetryWithAppInsights/TelemetryWithAppInsights.csproj b/dotnet/samples/Demos/TelemetryWithAppInsights/TelemetryWithAppInsights.csproj
index aaf0e5545b76..ac5b79837338 100644
--- a/dotnet/samples/Demos/TelemetryWithAppInsights/TelemetryWithAppInsights.csproj
+++ b/dotnet/samples/Demos/TelemetryWithAppInsights/TelemetryWithAppInsights.csproj
@@ -18,8 +18,8 @@
+
-
diff --git a/dotnet/samples/GettingStarted/GettingStarted.csproj b/dotnet/samples/GettingStarted/GettingStarted.csproj
index bbfb30f31a72..81581e7b4d57 100644
--- a/dotnet/samples/GettingStarted/GettingStarted.csproj
+++ b/dotnet/samples/GettingStarted/GettingStarted.csproj
@@ -50,7 +50,7 @@
-
+
@@ -60,6 +60,6 @@
-
+
\ No newline at end of file
diff --git a/dotnet/samples/GettingStarted/Step4_Dependency_Injection.cs b/dotnet/samples/GettingStarted/Step4_Dependency_Injection.cs
index 15d90a3c7b53..dd39962d627a 100644
--- a/dotnet/samples/GettingStarted/Step4_Dependency_Injection.cs
+++ b/dotnet/samples/GettingStarted/Step4_Dependency_Injection.cs
@@ -41,7 +41,7 @@ private ServiceProvider BuildServiceProvider()
collection.AddSingleton(new XunitLogger(this.Output));
var kernelBuilder = collection.AddKernel();
- kernelBuilder.Services.AddOpenAITextGeneration(TestConfiguration.OpenAI.ModelId, TestConfiguration.OpenAI.ApiKey);
+ kernelBuilder.Services.AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey);
kernelBuilder.Plugins.AddFromType();
return collection.BuildServiceProvider();
diff --git a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj
index ea4decbf86bb..df9e025b678f 100644
--- a/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj
+++ b/dotnet/samples/GettingStartedWithAgents/GettingStartedWithAgents.csproj
@@ -9,7 +9,7 @@
true
- $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110
+ $(NoWarn);CS8618,IDE0009,CA1051,CA1050,CA1707,CA1054,CA2007,VSTHRD111,CS1591,RCS1110,RCS1243,CA5394,SKEXP0001,SKEXP0010,SKEXP0020,SKEXP0040,SKEXP0050,SKEXP0060,SKEXP0070,SKEXP0101,SKEXP0110,OPENAI001
Library
5ee045b0-aea3-4f08-8d31-32d1a6f8fed0
@@ -32,13 +32,16 @@
-
+
+
+ true
+
-
+
@@ -48,4 +51,14 @@
+
+
+ Always
+
+
+
+
+
+
+
diff --git a/dotnet/samples/GettingStartedWithAgents/README.md b/dotnet/samples/GettingStartedWithAgents/README.md
index 39952506548c..ed0e68802994 100644
--- a/dotnet/samples/GettingStartedWithAgents/README.md
+++ b/dotnet/samples/GettingStartedWithAgents/README.md
@@ -19,13 +19,17 @@ The getting started with agents examples include:
Example|Description
---|---
-[Step1_Agent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs)|How to create and use an agent.
-[Step2_Plugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs)|How to associate plug-ins with an agent.
-[Step3_Chat](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs)|How to create a conversation between agents.
-[Step4_KernelFunctionStrategies](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs)|How to utilize a `KernelFunction` as a _chat strategy_.
-[Step5_JsonResult](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs)|How to have an agent produce JSON.
-[Step6_DependencyInjection](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs)|How to define dependency injection patterns for agents.
-[Step7_OpenAIAssistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step7_OpenAIAssistant.cs)|How to create an Open AI Assistant agent.
+[Step01_Agent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs)|How to create and use an agent.
+[Step02_Plugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs)|How to associate plug-ins with an agent.
+[Step03_Chat](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step03_Chat.cs)|How to create a conversation between agents.
+[Step04_KernelFunctionStrategies](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs)|How to utilize a `KernelFunction` as a _chat strategy_.
+[Step05_JsonResult](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step05_JsonResult.cs)|How to have an agent produce JSON.
+[Step06_DependencyInjection](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs)|How to define dependency injection patterns for agents.
+[Step07_Logging](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs)|How to enable logging for agents.
+[Step08_Assistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs)|How to create an Open AI Assistant agent.
+[Step09_Assistant_Vision](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs)|How to provide an image as input to an Open AI Assistant agent.
+[Step10_AssistantTool_CodeInterpreter](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs)|How to use the code-interpreter tool for an Open AI Assistant agent.
+[Step11_AssistantTool_FileSearch](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs)|How to use the file-search tool for an Open AI Assistant agent.
## Legacy Agents
diff --git a/dotnet/samples/GettingStartedWithAgents/Resources/cat.jpg b/dotnet/samples/GettingStartedWithAgents/Resources/cat.jpg
new file mode 100644
index 000000000000..1e9f26de48fc
Binary files /dev/null and b/dotnet/samples/GettingStartedWithAgents/Resources/cat.jpg differ
diff --git a/dotnet/samples/GettingStartedWithAgents/Resources/employees.pdf b/dotnet/samples/GettingStartedWithAgents/Resources/employees.pdf
new file mode 100644
index 000000000000..bba45f80a90b
Binary files /dev/null and b/dotnet/samples/GettingStartedWithAgents/Resources/employees.pdf differ
diff --git a/dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs b/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs
similarity index 76%
rename from dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs
rename to dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs
index d7d4a0471b01..bc5bee5249e5 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step01_Agent.cs
@@ -9,7 +9,7 @@ namespace GettingStarted;
/// Demonstrate creation of and
/// eliciting its response to three explicit user messages.
///
-public class Step1_Agent(ITestOutputHelper output) : BaseTest(output)
+public class Step01_Agent(ITestOutputHelper output) : BaseAgentsTest(output)
{
private const string ParrotName = "Parrot";
private const string ParrotInstructions = "Repeat the user message in the voice of a pirate and then end with a parrot sound.";
@@ -37,15 +37,15 @@ public async Task UseSingleChatCompletionAgentAsync()
// Local function to invoke agent and display the conversation messages.
async Task InvokeAgentAsync(string input)
{
- chat.Add(new ChatMessageContent(AuthorRole.User, input));
+ ChatMessageContent message = new(AuthorRole.User, input);
+ chat.Add(message);
+ this.WriteAgentChatMessage(message);
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
-
- await foreach (ChatMessageContent content in agent.InvokeAsync(chat))
+ await foreach (ChatMessageContent response in agent.InvokeAsync(chat))
{
- chat.Add(content);
+ chat.Add(response);
- Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
+ this.WriteAgentChatMessage(response);
}
}
}
diff --git a/dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs b/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs
similarity index 76%
rename from dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs
rename to dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs
index 7946adc7f687..29394991dcc4 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step02_Plugins.cs
@@ -11,7 +11,7 @@ namespace GettingStarted;
/// Demonstrate creation of with a ,
/// and then eliciting its response to explicit user messages.
///
-public class Step2_Plugins(ITestOutputHelper output) : BaseTest(output)
+public class Step02_Plugins(ITestOutputHelper output) : BaseAgentsTest(output)
{
private const string HostName = "Host";
private const string HostInstructions = "Answer questions about the menu.";
@@ -45,37 +45,34 @@ public async Task UseChatCompletionWithPluginAgentAsync()
// Local function to invoke agent and display the conversation messages.
async Task InvokeAgentAsync(string input)
{
- chat.Add(new ChatMessageContent(AuthorRole.User, input));
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
+ ChatMessageContent message = new(AuthorRole.User, input);
+ chat.Add(message);
+ this.WriteAgentChatMessage(message);
- await foreach (ChatMessageContent content in agent.InvokeAsync(chat))
+ await foreach (ChatMessageContent response in agent.InvokeAsync(chat))
{
- chat.Add(content);
+ chat.Add(response);
- Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
+ this.WriteAgentChatMessage(response);
}
}
}
- public sealed class MenuPlugin
+ private sealed class MenuPlugin
{
[KernelFunction, Description("Provides a list of specials from the menu.")]
[System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")]
- public string GetSpecials()
- {
- return @"
-Special Soup: Clam Chowder
-Special Salad: Cobb Salad
-Special Drink: Chai Tea
-";
- }
+ public string GetSpecials() =>
+ """
+ Special Soup: Clam Chowder
+ Special Salad: Cobb Salad
+ Special Drink: Chai Tea
+ """;
[KernelFunction, Description("Provides the price of the requested menu item.")]
public string GetItemPrice(
[Description("The name of the menu item.")]
- string menuItem)
- {
- return "$9.99";
- }
+ string menuItem) =>
+ "$9.99";
}
}
diff --git a/dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs b/dotnet/samples/GettingStartedWithAgents/Step03_Chat.cs
similarity index 86%
rename from dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs
rename to dotnet/samples/GettingStartedWithAgents/Step03_Chat.cs
index 5d0c185f95f5..1ada85d512f3 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step03_Chat.cs
@@ -11,7 +11,7 @@ namespace GettingStarted;
/// that inform how chat proceeds with regards to: Agent selection, chat continuation, and maximum
/// number of agent interactions.
///
-public class Step3_Chat(ITestOutputHelper output) : BaseTest(output)
+public class Step03_Chat(ITestOutputHelper output) : BaseAgentsTest(output)
{
private const string ReviewerName = "ArtDirector";
private const string ReviewerInstructions =
@@ -74,16 +74,16 @@ public async Task UseAgentGroupChatWithTwoAgentsAsync()
};
// Invoke chat and display messages.
- string input = "concept: maps made out of egg cartons.";
- chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
+ ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons.");
+ chat.AddChatMessage(input);
+ this.WriteAgentChatMessage(input);
- await foreach (ChatMessageContent content in chat.InvokeAsync())
+ await foreach (ChatMessageContent response in chat.InvokeAsync())
{
- Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
+ this.WriteAgentChatMessage(response);
}
- Console.WriteLine($"# IS COMPLETE: {chat.IsComplete}");
+ Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]");
}
private sealed class ApprovalTerminationStrategy : TerminationStrategy
diff --git a/dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs b/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs
similarity index 85%
rename from dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs
rename to dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs
index d71b6ae26767..f97c6e733421 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step04_KernelFunctionStrategies.cs
@@ -10,7 +10,7 @@ namespace GettingStarted;
/// Demonstrate usage of and
/// to manage execution.
///
-public class Step4_KernelFunctionStrategies(ITestOutputHelper output) : BaseTest(output)
+public class Step04_KernelFunctionStrategies(ITestOutputHelper output) : BaseAgentsTest(output)
{
private const string ReviewerName = "ArtDirector";
private const string ReviewerInstructions =
@@ -64,16 +64,17 @@ public async Task UseKernelFunctionStrategiesWithAgentGroupChatAsync()
KernelFunction selectionFunction =
KernelFunctionFactory.CreateFromPrompt(
$$$"""
- Your job is to determine which participant takes the next turn in a conversation according to the action of the most recent participant.
+ Determine which participant takes the next turn in a conversation based on the most recent participant.
State only the name of the participant to take the next turn.
+ No participant should take more than one turn in a row.
Choose only from these participants:
- {{{ReviewerName}}}
- {{{CopyWriterName}}}
Always follow these rules when selecting the next participant:
- - After {{{CopyWriterName}}} replies, it is {{{ReviewerName}}}'s turn.
- - After {{{ReviewerName}}} provides feedback, it is {{{CopyWriterName}}}'s turn.
+ - After {{{CopyWriterName}}}, it is {{{ReviewerName}}}'s turn.
+ - After {{{ReviewerName}}}, it is {{{CopyWriterName}}}'s turn.
History:
{{$history}}
@@ -117,15 +118,15 @@ State only the name of the participant to take the next turn.
};
// Invoke chat and display messages.
- string input = "concept: maps made out of egg cartons.";
- chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
+ ChatMessageContent message = new(AuthorRole.User, "concept: maps made out of egg cartons.");
+ chat.AddChatMessage(message);
+ this.WriteAgentChatMessage(message);
- await foreach (ChatMessageContent content in chat.InvokeAsync())
+ await foreach (ChatMessageContent response in chat.InvokeAsync())
{
- Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
+ this.WriteAgentChatMessage(response);
}
- Console.WriteLine($"# IS COMPLETE: {chat.IsComplete}");
+ Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]");
}
}
diff --git a/dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs b/dotnet/samples/GettingStartedWithAgents/Step05_JsonResult.cs
similarity index 79%
rename from dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs
rename to dotnet/samples/GettingStartedWithAgents/Step05_JsonResult.cs
index 20ad4c2096d4..8806c7d3b62d 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step05_JsonResult.cs
@@ -10,14 +10,14 @@ namespace GettingStarted;
///
/// Demonstrate parsing JSON response.
///
-public class Step5_JsonResult(ITestOutputHelper output) : BaseTest(output)
+public class Step05_JsonResult(ITestOutputHelper output) : BaseAgentsTest(output)
{
private const int ScoreCompletionThreshold = 70;
private const string TutorName = "Tutor";
private const string TutorInstructions =
"""
- Think step-by-step and rate the user input on creativity and expressivness from 1-100.
+ Think step-by-step and rate the user input on creativity and expressiveness from 1-100.
Respond in JSON format with the following JSON schema:
@@ -60,19 +60,20 @@ public async Task UseKernelFunctionStrategiesWithJsonResultAsync()
// Local function to invoke agent and display the conversation messages.
async Task InvokeAgentAsync(string input)
{
- chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
+ ChatMessageContent message = new(AuthorRole.User, input);
+ chat.AddChatMessage(message);
+ this.WriteAgentChatMessage(message);
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
-
- await foreach (ChatMessageContent content in chat.InvokeAsync(agent))
+ await foreach (ChatMessageContent response in chat.InvokeAsync(agent))
{
- Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
- Console.WriteLine($"# IS COMPLETE: {chat.IsComplete}");
+ this.WriteAgentChatMessage(response);
+
+ Console.WriteLine($"[IS COMPLETED: {chat.IsComplete}]");
}
}
}
- private record struct InputScore(int score, string notes);
+ private record struct WritingScore(int score, string notes);
private sealed class ThresholdTerminationStrategy : TerminationStrategy
{
@@ -80,7 +81,7 @@ protected override Task ShouldAgentTerminateAsync(Agent agent, IReadOnlyLi
{
string lastMessageContent = history[history.Count - 1].Content ?? string.Empty;
- InputScore? result = JsonResultTranslator.Translate(lastMessageContent);
+ WritingScore? result = JsonResultTranslator.Translate(lastMessageContent);
return Task.FromResult((result?.score ?? 0) >= ScoreCompletionThreshold);
}
diff --git a/dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs b/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
similarity index 65%
rename from dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs
rename to dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
index 21af5db70dce..a0d32f8cefba 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step06_DependencyInjection.cs
@@ -3,23 +3,19 @@
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
-using Microsoft.SemanticKernel.Agents.Chat;
using Microsoft.SemanticKernel.ChatCompletion;
-using Resources;
namespace GettingStarted;
///
/// Demonstrate creation of an agent via dependency injection.
///
-public class Step6_DependencyInjection(ITestOutputHelper output) : BaseTest(output)
+public class Step06_DependencyInjection(ITestOutputHelper output) : BaseAgentsTest(output)
{
- private const int ScoreCompletionThreshold = 70;
-
private const string TutorName = "Tutor";
private const string TutorInstructions =
"""
- Think step-by-step and rate the user input on creativity and expressivness from 1-100.
+ Think step-by-step and rate the user input on creativity and expressiveness from 1-100.
Respond in JSON format with the following JSON schema:
@@ -80,50 +76,27 @@ public async Task UseDependencyInjectionToCreateAgentAsync()
// Local function to invoke agent and display the conversation messages.
async Task WriteAgentResponse(string input)
{
- Console.WriteLine($"# {AuthorRole.User}: {input}");
+ ChatMessageContent message = new(AuthorRole.User, input);
+ this.WriteAgentChatMessage(message);
- await foreach (ChatMessageContent content in agentClient.RunDemoAsync(input))
+ await foreach (ChatMessageContent response in agentClient.RunDemoAsync(message))
{
- Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
+ this.WriteAgentChatMessage(response);
}
}
}
private sealed class AgentClient([FromKeyedServices(TutorName)] ChatCompletionAgent agent)
{
- private readonly AgentGroupChat _chat =
- new()
- {
- ExecutionSettings =
- new()
- {
- // Here a TerminationStrategy subclass is used that will terminate when
- // the response includes a score that is greater than or equal to 70.
- TerminationStrategy = new ThresholdTerminationStrategy()
- }
- };
-
- public IAsyncEnumerable RunDemoAsync(string input)
- {
- // Create a chat for agent interaction.
+ private readonly AgentGroupChat _chat = new();
- this._chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
+ public IAsyncEnumerable RunDemoAsync(ChatMessageContent input)
+ {
+ this._chat.AddChatMessage(input);
return this._chat.InvokeAsync(agent);
}
}
- private record struct InputScore(int score, string notes);
-
- private sealed class ThresholdTerminationStrategy : TerminationStrategy
- {
- protected override Task ShouldAgentTerminateAsync(Agent agent, IReadOnlyList history, CancellationToken cancellationToken)
- {
- string lastMessageContent = history[history.Count - 1].Content ?? string.Empty;
-
- InputScore? result = JsonResultTranslator.Translate(lastMessageContent);
-
- return Task.FromResult((result?.score ?? 0) >= ScoreCompletionThreshold);
- }
- }
+ private record struct WritingScore(int score, string notes);
}
diff --git a/dotnet/samples/GettingStartedWithAgents/Step7_Logging.cs b/dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs
similarity index 86%
rename from dotnet/samples/GettingStartedWithAgents/Step7_Logging.cs
rename to dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs
index 1ab559e668fb..3a48d407dea9 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step7_Logging.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step07_Logging.cs
@@ -8,13 +8,13 @@
namespace GettingStarted;
///
-/// A repeat of with logging enabled via assignment
+/// A repeat of with logging enabled via assignment
/// of a to .
///
///
/// Samples become super noisy with logging always enabled.
///
-public class Step7_Logging(ITestOutputHelper output) : BaseTest(output)
+public class Step07_Logging(ITestOutputHelper output) : BaseAgentsTest(output)
{
private const string ReviewerName = "ArtDirector";
private const string ReviewerInstructions =
@@ -81,16 +81,16 @@ public async Task UseLoggerFactoryWithAgentGroupChatAsync()
};
// Invoke chat and display messages.
- string input = "concept: maps made out of egg cartons.";
- chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
+ ChatMessageContent input = new(AuthorRole.User, "concept: maps made out of egg cartons.");
+ chat.AddChatMessage(input);
+ this.WriteAgentChatMessage(input);
- await foreach (ChatMessageContent content in chat.InvokeAsync())
+ await foreach (ChatMessageContent response in chat.InvokeAsync())
{
- Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
+ this.WriteAgentChatMessage(response);
}
- Console.WriteLine($"# IS COMPLETE: {chat.IsComplete}");
+ Console.WriteLine($"\n[IS COMPLETED: {chat.IsComplete}]");
}
private sealed class ApprovalTerminationStrategy : TerminationStrategy
diff --git a/dotnet/samples/GettingStartedWithAgents/Step8_OpenAIAssistant.cs b/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs
similarity index 57%
rename from dotnet/samples/GettingStartedWithAgents/Step8_OpenAIAssistant.cs
rename to dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs
index d9e9760e3fa6..ba4ab065c2a6 100644
--- a/dotnet/samples/GettingStartedWithAgents/Step8_OpenAIAssistant.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step08_Assistant.cs
@@ -8,36 +8,35 @@
namespace GettingStarted;
///
-/// This example demonstrates that outside of initialization (and cleanup), using
-/// is no different from
-/// even with with a .
+/// This example demonstrates similarity between using
+/// and (see: Step 2).
///
-public class Step8_OpenAIAssistant(ITestOutputHelper output) : BaseTest(output)
+public class Step08_Assistant(ITestOutputHelper output) : BaseAgentsTest(output)
{
private const string HostName = "Host";
private const string HostInstructions = "Answer questions about the menu.";
[Fact]
- public async Task UseSingleOpenAIAssistantAgentAsync()
+ public async Task UseSingleAssistantAgentAsync()
{
// Define the agent
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.CreateAsync(
kernel: new(),
- config: new(this.ApiKey, this.Endpoint),
- new()
+ clientProvider: this.GetClientProvider(),
+ new(this.Model)
{
Instructions = HostInstructions,
Name = HostName,
- ModelId = this.Model,
+ Metadata = AssistantSampleMetadata,
});
// Initialize plugin and add to the agent's Kernel (same as direct Kernel usage).
KernelPlugin plugin = KernelPluginFactory.CreateFromType();
agent.Kernel.Plugins.Add(plugin);
- // Create a thread for the agent interaction.
- string threadId = await agent.CreateThreadAsync();
+ // Create a thread for the agent conversation.
+ string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
// Respond to user input
try
@@ -56,45 +55,32 @@ await OpenAIAssistantAgent.CreateAsync(
// Local function to invoke agent and display the conversation messages.
async Task InvokeAgentAsync(string input)
{
- await agent.AddChatMessageAsync(threadId, new ChatMessageContent(AuthorRole.User, input));
+ ChatMessageContent message = new(AuthorRole.User, input);
+ await agent.AddChatMessageAsync(threadId, message);
+ this.WriteAgentChatMessage(message);
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
-
- await foreach (ChatMessageContent content in agent.InvokeAsync(threadId))
+ await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
{
- if (content.Role != AuthorRole.Tool)
- {
- Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
- }
+ this.WriteAgentChatMessage(response);
}
}
}
private sealed class MenuPlugin
{
- public const string CorrelationIdArgument = "correlationId";
-
- private readonly List _correlationIds = [];
-
- public IReadOnlyList CorrelationIds => this._correlationIds;
-
[KernelFunction, Description("Provides a list of specials from the menu.")]
[System.Diagnostics.CodeAnalysis.SuppressMessage("Design", "CA1024:Use properties where appropriate", Justification = "Too smart")]
- public string GetSpecials()
- {
- return @"
-Special Soup: Clam Chowder
-Special Salad: Cobb Salad
-Special Drink: Chai Tea
-";
- }
+ public string GetSpecials() =>
+ """
+ Special Soup: Clam Chowder
+ Special Salad: Cobb Salad
+ Special Drink: Chai Tea
+ """;
[KernelFunction, Description("Provides the price of the requested menu item.")]
public string GetItemPrice(
[Description("The name of the menu item.")]
- string menuItem)
- {
- return "$9.99";
- }
+ string menuItem) =>
+ "$9.99";
}
}
diff --git a/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs b/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs
new file mode 100644
index 000000000000..62845f2c4366
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/Step09_Assistant_Vision.cs
@@ -0,0 +1,74 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Resources;
+
+namespace GettingStarted;
+
+///
+/// Demonstrate providing image input to .
+///
+public class Step09_Assistant_Vision(ITestOutputHelper output) : BaseAgentsTest(output)
+{
+ ///
+ /// Azure currently only supports messages of type=text.
+ ///
+ protected override bool ForceOpenAI => true;
+
+ [Fact]
+ public async Task UseSingleAssistantAgentAsync()
+ {
+ // Define the agent
+ OpenAIClientProvider provider = this.GetClientProvider();
+ OpenAIAssistantAgent agent =
+ await OpenAIAssistantAgent.CreateAsync(
+ kernel: new(),
+ provider,
+ new(this.Model)
+ {
+ Metadata = AssistantSampleMetadata,
+ });
+
+ // Upload an image
+ await using Stream imageStream = EmbeddedResource.ReadStream("cat.jpg")!;
+ string fileId = await agent.UploadFileAsync(imageStream, "cat.jpg");
+
+ // Create a thread for the agent conversation.
+ string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
+
+ // Respond to user input
+ try
+ {
+ // Refer to public image by url
+ await InvokeAgentAsync(CreateMessageWithImageUrl("Describe this image.", "https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/New_york_times_square-terabass.jpg/1200px-New_york_times_square-terabass.jpg"));
+ await InvokeAgentAsync(CreateMessageWithImageUrl("What is the main color in this image?", "https://upload.wikimedia.org/wikipedia/commons/5/56/White_shark.jpg"));
+ // Refer to uploaded image by file-id.
+ await InvokeAgentAsync(CreateMessageWithImageReference("Is there an animal in this image?", fileId));
+ }
+ finally
+ {
+ await agent.DeleteThreadAsync(threadId);
+ await agent.DeleteAsync();
+ await provider.Client.GetFileClient().DeleteFileAsync(fileId);
+ }
+
+ // Local function to invoke agent and display the conversation messages.
+ async Task InvokeAgentAsync(ChatMessageContent message)
+ {
+ await agent.AddChatMessageAsync(threadId, message);
+ this.WriteAgentChatMessage(message);
+
+ await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
+ {
+ this.WriteAgentChatMessage(response);
+ }
+ }
+ }
+
+ private ChatMessageContent CreateMessageWithImageUrl(string input, string url)
+ => new(AuthorRole.User, [new TextContent(input), new ImageContent(new Uri(url))]);
+
+ private ChatMessageContent CreateMessageWithImageReference(string input, string fileId)
+ => new(AuthorRole.User, [new TextContent(input), new FileReferenceContent(fileId)]);
+}
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs b/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs
similarity index 50%
rename from dotnet/samples/Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs
rename to dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs
index 75b237489025..1205771d66be 100644
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs
+++ b/dotnet/samples/GettingStartedWithAgents/Step10_AssistantTool_CodeInterpreter.cs
@@ -1,34 +1,31 @@
// Copyright (c) Microsoft. All rights reserved.
using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
-namespace Agents;
+namespace GettingStarted;
///
/// Demonstrate using code-interpreter on .
///
-public class OpenAIAssistant_CodeInterpreter(ITestOutputHelper output) : BaseTest(output)
+public class Step10_AssistantTool_CodeInterpreter(ITestOutputHelper output) : BaseAgentsTest(output)
{
- protected override bool ForceOpenAI => true;
-
[Fact]
- public async Task UseCodeInterpreterToolWithOpenAIAssistantAgentAsync()
+ public async Task UseCodeInterpreterToolWithAssistantAgentAsync()
{
// Define the agent
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.CreateAsync(
kernel: new(),
- config: new(this.ApiKey, this.Endpoint),
- new()
+ clientProvider: this.GetClientProvider(),
+ new(this.Model)
{
- EnableCodeInterpreter = true, // Enable code-interpreter
- ModelId = this.Model,
+ EnableCodeInterpreter = true,
+ Metadata = AssistantSampleMetadata,
});
- // Create a chat for agent interaction.
- AgentGroupChat chat = new();
+ // Create a thread for the agent conversation.
+ string threadId = await agent.CreateThreadAsync(new OpenAIThreadCreationOptions { Metadata = AssistantSampleMetadata });
// Respond to user input
try
@@ -37,19 +34,20 @@ await OpenAIAssistantAgent.CreateAsync(
}
finally
{
+ await agent.DeleteThreadAsync(threadId);
await agent.DeleteAsync();
}
// Local function to invoke agent and display the conversation messages.
async Task InvokeAgentAsync(string input)
{
- chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
-
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
+ ChatMessageContent message = new(AuthorRole.User, input);
+ await agent.AddChatMessageAsync(threadId, message);
+ this.WriteAgentChatMessage(message);
- await foreach (var content in chat.InvokeAsync(agent))
+ await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
{
- Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
+ this.WriteAgentChatMessage(response);
}
}
}
diff --git a/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs b/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs
new file mode 100644
index 000000000000..70985d0fc27b
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/Step11_AssistantTool_FileSearch.cs
@@ -0,0 +1,83 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Files;
+using OpenAI.VectorStores;
+using Resources;
+
+namespace GettingStarted;
+
+///
+/// Demonstrate using file-search on .
+///
+public class Step11_AssistantTool_FileSearch(ITestOutputHelper output) : BaseAgentsTest(output)
+{
+ [Fact]
+ public async Task UseFileSearchToolWithAssistantAgentAsync()
+ {
+ // Define the agent
+ OpenAIClientProvider provider = this.GetClientProvider();
+ OpenAIAssistantAgent agent =
+ await OpenAIAssistantAgent.CreateAsync(
+ kernel: new(),
+ clientProvider: provider,
+ new(this.Model)
+ {
+ EnableFileSearch = true,
+ Metadata = AssistantSampleMetadata,
+ });
+
+ // Upload file - Using a table of fictional employees.
+ FileClient fileClient = provider.Client.GetFileClient();
+ await using Stream stream = EmbeddedResource.ReadStream("employees.pdf")!;
+ OpenAIFileInfo fileInfo = await fileClient.UploadFileAsync(stream, "employees.pdf", FileUploadPurpose.Assistants);
+
+ // Create a vector-store
+ VectorStoreClient vectorStoreClient = provider.Client.GetVectorStoreClient();
+ VectorStore vectorStore =
+ await vectorStoreClient.CreateVectorStoreAsync(
+ new VectorStoreCreationOptions()
+ {
+ FileIds = [fileInfo.Id],
+ Metadata = { { AssistantSampleMetadataKey, bool.TrueString } }
+ });
+
+ // Create a thread associated with a vector-store for the agent conversation.
+ string threadId =
+ await agent.CreateThreadAsync(
+ new OpenAIThreadCreationOptions
+ {
+ VectorStoreId = vectorStore.Id,
+ Metadata = AssistantSampleMetadata,
+ });
+
+ // Respond to user input
+ try
+ {
+ await InvokeAgentAsync("Who is the youngest employee?");
+ await InvokeAgentAsync("Who works in sales?");
+ await InvokeAgentAsync("I have a customer request, who can help me?");
+ }
+ finally
+ {
+ await agent.DeleteThreadAsync(threadId);
+ await agent.DeleteAsync(CancellationToken.None);
+ await vectorStoreClient.DeleteVectorStoreAsync(vectorStore);
+ await fileClient.DeleteFileAsync(fileInfo.Id);
+ }
+
+ // Local function to invoke agent and display the conversation messages.
+ async Task InvokeAgentAsync(string input)
+ {
+ ChatMessageContent message = new(AuthorRole.User, input);
+ await agent.AddChatMessageAsync(threadId, message);
+ this.WriteAgentChatMessage(message);
+
+ await foreach (ChatMessageContent response in agent.InvokeAsync(threadId))
+ {
+ this.WriteAgentChatMessage(response);
+ }
+ }
+ }
+}
diff --git a/dotnet/samples/LearnResources/LearnResources.csproj b/dotnet/samples/LearnResources/LearnResources.csproj
index d210f8effa91..d639fc8a0cee 100644
--- a/dotnet/samples/LearnResources/LearnResources.csproj
+++ b/dotnet/samples/LearnResources/LearnResources.csproj
@@ -51,6 +51,7 @@
+
@@ -68,6 +69,6 @@
-
+
\ No newline at end of file
diff --git a/dotnet/samples/LearnResources/MicrosoftLearn/AIServices.cs b/dotnet/samples/LearnResources/MicrosoftLearn/AIServices.cs
index a56e6591f8ad..d957358cac77 100644
--- a/dotnet/samples/LearnResources/MicrosoftLearn/AIServices.cs
+++ b/dotnet/samples/LearnResources/MicrosoftLearn/AIServices.cs
@@ -45,25 +45,11 @@ public async Task RunAsync()
.Build();
//
- // You could instead create a kernel with a legacy Azure OpenAI text completion service
- //
- kernel = Kernel.CreateBuilder()
- .AddAzureOpenAITextGeneration(textModelId, endpoint, apiKey)
- .Build();
- //
-
// You can also create a kernel with a (non-Azure) OpenAI chat completion service
//
kernel = Kernel.CreateBuilder()
.AddOpenAIChatCompletion(openAImodelId, openAIapiKey)
.Build();
//
-
- // Or a kernel with a legacy OpenAI text completion service
- //
- kernel = Kernel.CreateBuilder()
- .AddOpenAITextGeneration(openAItextModelId, openAIapiKey)
- .Build();
- //
}
}
diff --git a/dotnet/src/Agents/Abstractions/AgentChannel.cs b/dotnet/src/Agents/Abstractions/AgentChannel.cs
index 34f7a8030896..3c239360468d 100644
--- a/dotnet/src/Agents/Abstractions/AgentChannel.cs
+++ b/dotnet/src/Agents/Abstractions/AgentChannel.cs
@@ -40,6 +40,10 @@ public abstract class AgentChannel
/// The agent actively interacting with the chat.
/// The to monitor for cancellation requests. The default is .
/// Asynchronous enumeration of messages.
+ ///
+ /// In the enumeration returned by this method, a message is considered visible if it is intended to be displayed to the user.
+ /// Example of a non-visible message is function-content for functions that are automatically executed.
+ ///
protected internal abstract IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(
Agent agent,
CancellationToken cancellationToken = default);
@@ -68,6 +72,10 @@ public abstract class AgentChannel : AgentChannel where TAgent : Agent
/// The agent actively interacting with the chat.
/// The to monitor for cancellation requests. The default is .
/// Asynchronous enumeration of messages.
+ ///
+ /// In the enumeration returned by this method, a message is considered visible if it is intended to be displayed to the user.
+ /// Example of a non-visible message is function-content for functions that are automatically executed.
+ ///
protected internal abstract IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(
TAgent agent,
CancellationToken cancellationToken = default);
diff --git a/dotnet/src/Agents/Abstractions/AgentChat.cs b/dotnet/src/Agents/Abstractions/AgentChat.cs
index cdc46024ece7..6813e98f4c1d 100644
--- a/dotnet/src/Agents/Abstractions/AgentChat.cs
+++ b/dotnet/src/Agents/Abstractions/AgentChat.cs
@@ -308,7 +308,7 @@ private void ClearActivitySignal()
/// The activity signal is used to manage ability and visibility for taking actions based
/// on conversation history.
///
- private void SetActivityOrThrow()
+ protected void SetActivityOrThrow()
{
// Note: Interlocked is the absolute lightest synchronization mechanism available in dotnet.
int wasActive = Interlocked.CompareExchange(ref this._isActive, 1, 0);
diff --git a/dotnet/src/Agents/Abstractions/AggregatorChannel.cs b/dotnet/src/Agents/Abstractions/AggregatorChannel.cs
index c7123abf9b71..dfd33bd53299 100644
--- a/dotnet/src/Agents/Abstractions/AggregatorChannel.cs
+++ b/dotnet/src/Agents/Abstractions/AggregatorChannel.cs
@@ -13,11 +13,13 @@ internal sealed class AggregatorChannel(AgentChat chat) : AgentChannel
protected internal override IAsyncEnumerable GetHistoryAsync(CancellationToken cancellationToken = default)
{
return this._chat.GetChatMessagesAsync(cancellationToken);
}
+ ///
protected internal override async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(AggregatorAgent agent, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
ChatMessageContent? lastMessage = null;
diff --git a/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs b/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs
index 314d68ce8cd8..b971fe2ce8d4 100644
--- a/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs
+++ b/dotnet/src/Agents/Abstractions/Logging/AgentChatLogMessages.cs
@@ -61,7 +61,7 @@ public static partial void LogAgentChatAddingMessages(
[LoggerMessage(
EventId = 0,
Level = LogLevel.Information,
- Message = "[{MethodName}] Adding Messages: {MessageCount}.")]
+ Message = "[{MethodName}] Added Messages: {MessageCount}.")]
public static partial void LogAgentChatAddedMessages(
this ILogger logger,
string methodName,
diff --git a/dotnet/src/Agents/Core/ChatCompletionAgent.cs b/dotnet/src/Agents/Core/ChatCompletionAgent.cs
index 212c56038484..87e0e9c2a7cb 100644
--- a/dotnet/src/Agents/Core/ChatCompletionAgent.cs
+++ b/dotnet/src/Agents/Core/ChatCompletionAgent.cs
@@ -27,7 +27,7 @@ public override async IAsyncEnumerable InvokeAsync(
kernel ??= this.Kernel;
arguments ??= this.Arguments;
- (IChatCompletionService chatCompletionService, PromptExecutionSettings? executionSettings) = this.GetChatCompletionService(kernel, arguments);
+ (IChatCompletionService chatCompletionService, PromptExecutionSettings? executionSettings) = GetChatCompletionService(kernel, arguments);
ChatHistory chat = this.SetupAgentChatHistory(history);
@@ -54,7 +54,7 @@ await chatCompletionService.GetChatMessageContentsAsync(
history.Add(message);
}
- foreach (ChatMessageContent message in messages ?? [])
+ foreach (ChatMessageContent message in messages)
{
message.AuthorName = this.Name;
@@ -72,7 +72,7 @@ public override async IAsyncEnumerable InvokeStream
kernel ??= this.Kernel;
arguments ??= this.Arguments;
- (IChatCompletionService chatCompletionService, PromptExecutionSettings? executionSettings) = this.GetChatCompletionService(kernel, arguments);
+ (IChatCompletionService chatCompletionService, PromptExecutionSettings? executionSettings) = GetChatCompletionService(kernel, arguments);
ChatHistory chat = this.SetupAgentChatHistory(history);
@@ -107,7 +107,7 @@ public override async IAsyncEnumerable InvokeStream
}
}
- private (IChatCompletionService service, PromptExecutionSettings? executionSettings) GetChatCompletionService(Kernel kernel, KernelArguments? arguments)
+ internal static (IChatCompletionService service, PromptExecutionSettings? executionSettings) GetChatCompletionService(Kernel kernel, KernelArguments? arguments)
{
// Need to provide a KernelFunction to the service selector as a container for the execution-settings.
KernelFunction nullPrompt = KernelFunctionFactory.CreateFromPrompt("placeholder", arguments?.ExecutionSettings?.Values);
diff --git a/dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs b/dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs
index a45bfa57011d..8c2f022830d1 100644
--- a/dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs
+++ b/dotnet/src/Agents/Core/History/ChatHistorySummarizationReducer.cs
@@ -80,7 +80,7 @@ Provide a concise and complete summarizion of the entire dialog that does not ex
IEnumerable summarizedHistory =
history.Extract(
this.UseSingleSummary ? 0 : insertionPoint,
- truncationIndex,
+ truncationIndex - 1,
(m) => m.Items.Any(i => i is FunctionCallContent || i is FunctionResultContent));
try
@@ -154,7 +154,9 @@ public override bool Equals(object? obj)
ChatHistorySummarizationReducer? other = obj as ChatHistorySummarizationReducer;
return other != null &&
this._thresholdCount == other._thresholdCount &&
- this._targetCount == other._targetCount;
+ this._targetCount == other._targetCount &&
+ this.UseSingleSummary == other.UseSingleSummary &&
+ string.Equals(this.SummarizationInstructions, other.SummarizationInstructions, StringComparison.Ordinal);
}
///
diff --git a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
index 222ea5c5be88..a5a4cde76d6f 100644
--- a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
+++ b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
@@ -19,6 +19,7 @@
+
@@ -32,7 +33,7 @@
-
+
diff --git a/dotnet/src/Agents/OpenAI/Extensions/AuthorRoleExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/AuthorRoleExtensions.cs
index cd4e80c3abf1..895482927515 100644
--- a/dotnet/src/Agents/OpenAI/Extensions/AuthorRoleExtensions.cs
+++ b/dotnet/src/Agents/OpenAI/Extensions/AuthorRoleExtensions.cs
@@ -1,6 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
-using Azure.AI.OpenAI.Assistants;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
namespace Microsoft.SemanticKernel.Agents.OpenAI;
diff --git a/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
index 9665fb680498..c4acca58770f 100644
--- a/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
+++ b/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
@@ -2,7 +2,7 @@
using System;
using System.Collections.Generic;
using System.Linq;
-using Azure.AI.OpenAI.Assistants;
+using OpenAI.Assistants;
namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -13,9 +13,8 @@ internal static class KernelFunctionExtensions
///
/// The source function
/// The plugin name
- /// The delimiter character
/// An OpenAI tool definition
- public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string pluginName, string delimiter)
+ public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string pluginName)
{
var metadata = function.Metadata;
if (metadata.Parameters.Count > 0)
@@ -47,10 +46,17 @@ public static FunctionToolDefinition ToToolDefinition(this KernelFunction functi
required,
};
- return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName, delimiter), function.Description, BinaryData.FromObjectAsJson(spec));
+ return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName))
+ {
+ Description = function.Description,
+ Parameters = BinaryData.FromObjectAsJson(spec)
+ };
}
- return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName, delimiter), function.Description);
+ return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName))
+ {
+ Description = function.Description
+ };
}
private static string ConvertType(Type? type)
diff --git a/dotnet/src/Agents/OpenAI/Azure/AddHeaderRequestPolicy.cs b/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs
similarity index 87%
rename from dotnet/src/Agents/OpenAI/Azure/AddHeaderRequestPolicy.cs
rename to dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs
index 084e533fe757..d017fb403f23 100644
--- a/dotnet/src/Agents/OpenAI/Azure/AddHeaderRequestPolicy.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AddHeaderRequestPolicy.cs
@@ -2,7 +2,7 @@
using Azure.Core;
using Azure.Core.Pipeline;
-namespace Microsoft.SemanticKernel.Agents.OpenAI.Azure;
+namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
///
/// Helper class to inject headers into Azure SDK HTTP pipeline
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantMessageFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantMessageFactory.cs
new file mode 100644
index 000000000000..4c31a1bcf291
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantMessageFactory.cs
@@ -0,0 +1,64 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using OpenAI.Assistants;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+
+///
+/// Factory for creating based on .
+/// Also able to produce .
+///
+///
+/// Improves testability.
+///
+internal static class AssistantMessageFactory
+{
+ ///
+ /// Produces based on .
+ ///
+ /// The message content.
+ public static MessageCreationOptions CreateOptions(ChatMessageContent message)
+ {
+ MessageCreationOptions options = new();
+
+ if (message.Metadata != null)
+ {
+ foreach (var metadata in message.Metadata)
+ {
+ options.Metadata.Add(metadata.Key, metadata.Value?.ToString() ?? string.Empty);
+ }
+ }
+
+ return options;
+ }
+
+ ///
+ /// Translates into enumeration of .
+ ///
+ /// The message content.
+ public static IEnumerable GetMessageContents(ChatMessageContent message)
+ {
+ foreach (KernelContent content in message.Items)
+ {
+ if (content is TextContent textContent)
+ {
+ yield return MessageContent.FromText(content.ToString());
+ }
+ else if (content is ImageContent imageContent)
+ {
+ if (imageContent.Uri != null)
+ {
+ yield return MessageContent.FromImageUrl(imageContent.Uri);
+ }
+ else if (!string.IsNullOrWhiteSpace(imageContent.DataUri))
+ {
+ yield return MessageContent.FromImageUrl(new(imageContent.DataUri!));
+ }
+ }
+ else if (content is FileReferenceContent fileContent)
+ {
+ yield return MessageContent.FromImageFileId(fileContent.FileId);
+ }
+ }
+ }
+}
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
new file mode 100644
index 000000000000..981c646254af
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantRunOptionsFactory.cs
@@ -0,0 +1,53 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using OpenAI.Assistants;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+
+///
+/// Factory for creating definition.
+///
+///
+/// Improves testability.
+///
+internal static class AssistantRunOptionsFactory
+{
+ ///
+ /// Produce by reconciling and .
+ ///
+ /// The assistant definition
+ /// The run specific options
+ public static RunCreationOptions GenerateOptions(OpenAIAssistantDefinition definition, OpenAIAssistantInvocationOptions? invocationOptions)
+ {
+ int? truncationMessageCount = ResolveExecutionSetting(invocationOptions?.TruncationMessageCount, definition.ExecutionOptions?.TruncationMessageCount);
+
+ RunCreationOptions options =
+ new()
+ {
+ MaxCompletionTokens = ResolveExecutionSetting(invocationOptions?.MaxCompletionTokens, definition.ExecutionOptions?.MaxCompletionTokens),
+ MaxPromptTokens = ResolveExecutionSetting(invocationOptions?.MaxPromptTokens, definition.ExecutionOptions?.MaxPromptTokens),
+ ModelOverride = invocationOptions?.ModelName,
+ NucleusSamplingFactor = ResolveExecutionSetting(invocationOptions?.TopP, definition.TopP),
+ ParallelToolCallsEnabled = ResolveExecutionSetting(invocationOptions?.ParallelToolCallsEnabled, definition.ExecutionOptions?.ParallelToolCallsEnabled),
+ ResponseFormat = ResolveExecutionSetting(invocationOptions?.EnableJsonResponse, definition.EnableJsonResponse) ?? false ? AssistantResponseFormat.JsonObject : null,
+ Temperature = ResolveExecutionSetting(invocationOptions?.Temperature, definition.Temperature),
+ TruncationStrategy = truncationMessageCount.HasValue ? RunTruncationStrategy.CreateLastMessagesStrategy(truncationMessageCount.Value) : null,
+ };
+
+ if (invocationOptions?.Metadata != null)
+ {
+ foreach (var metadata in invocationOptions.Metadata)
+ {
+ options.Metadata.Add(metadata.Key, metadata.Value ?? string.Empty);
+ }
+ }
+
+ return options;
+ }
+
+ private static TValue? ResolveExecutionSetting(TValue? setting, TValue? agentSetting) where TValue : struct
+ =>
+ setting.HasValue && (!agentSetting.HasValue || !EqualityComparer.Default.Equals(setting.Value, agentSetting.Value)) ?
+ setting.Value :
+ null;
+}
diff --git a/dotnet/src/Agents/OpenAI/AssistantThreadActions.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
similarity index 70%
rename from dotnet/src/Agents/OpenAI/AssistantThreadActions.cs
rename to dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
index cfc7a905cfc7..06c49f7a1905 100644
--- a/dotnet/src/Agents/OpenAI/AssistantThreadActions.cs
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantThreadActions.cs
@@ -1,4 +1,5 @@
// Copyright (c) Microsoft. All rights reserved.
+using System.ClientModel;
using System.Collections.Generic;
using System.Linq;
using System.Net;
@@ -7,19 +8,18 @@
using System.Threading;
using System.Threading.Tasks;
using Azure;
-using Azure.AI.OpenAI.Assistants;
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI;
+using OpenAI.Assistants;
-namespace Microsoft.SemanticKernel.Agents.OpenAI;
+namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
///
/// Actions associated with an Open Assistant thread.
///
internal static class AssistantThreadActions
{
- private const string FunctionDelimiter = "-";
-
private static readonly HashSet s_pollingStatuses =
[
RunStatus.Queued,
@@ -34,6 +34,45 @@ internal static class AssistantThreadActions
RunStatus.Cancelled,
];
+ ///
+ /// Create a new assistant thread.
+ ///
+ /// The assistant client
+ /// The options for creating the thread
+ /// The to monitor for cancellation requests. The default is .
+ /// The thread identifier
+ public static async Task CreateThreadAsync(AssistantClient client, OpenAIThreadCreationOptions? options, CancellationToken cancellationToken = default)
+ {
+ ThreadCreationOptions createOptions =
+ new()
+ {
+ ToolResources = AssistantToolResourcesFactory.GenerateToolResources(options?.VectorStoreId, options?.CodeInterpreterFileIds),
+ };
+
+ if (options?.Messages is not null)
+ {
+ foreach (ChatMessageContent message in options.Messages)
+ {
+ ThreadInitializationMessage threadMessage = new(
+ role: message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Assistant,
+ content: AssistantMessageFactory.GetMessageContents(message));
+ createOptions.InitialMessages.Add(threadMessage);
+ }
+ }
+
+ if (options?.Metadata != null)
+ {
+ foreach (KeyValuePair item in options.Metadata)
+ {
+ createOptions.Metadata[item.Key] = item.Value;
+ }
+ }
+
+ AssistantThread thread = await client.CreateThreadAsync(createOptions, cancellationToken).ConfigureAwait(false);
+
+ return thread.Id;
+ }
+
///
/// Create a message in the specified thread.
///
@@ -42,18 +81,21 @@ internal static class AssistantThreadActions
/// The message to add
/// The to monitor for cancellation requests. The default is .
/// if a system message is present, without taking any other action
- public static async Task CreateMessageAsync(AssistantsClient client, string threadId, ChatMessageContent message, CancellationToken cancellationToken)
+ public static async Task CreateMessageAsync(AssistantClient client, string threadId, ChatMessageContent message, CancellationToken cancellationToken)
{
if (message.Items.Any(i => i is FunctionCallContent))
{
return;
}
+ MessageCreationOptions options = AssistantMessageFactory.CreateOptions(message);
+
await client.CreateMessageAsync(
threadId,
- message.Role.ToMessageRole(),
- message.Content,
- cancellationToken: cancellationToken).ConfigureAwait(false);
+ message.Role == AuthorRole.User ? MessageRole.User : MessageRole.Assistant,
+ AssistantMessageFactory.GetMessageContents(message),
+ options,
+ cancellationToken).ConfigureAwait(false);
}
///
@@ -63,18 +105,16 @@ await client.CreateMessageAsync(
/// The thread identifier
/// The to monitor for cancellation requests. The default is .
/// Asynchronous enumeration of messages.
- public static async IAsyncEnumerable GetMessagesAsync(AssistantsClient client, string threadId, [EnumeratorCancellation] CancellationToken cancellationToken)
+ public static async IAsyncEnumerable GetMessagesAsync(AssistantClient client, string threadId, [EnumeratorCancellation] CancellationToken cancellationToken)
{
Dictionary agentNames = []; // Cache agent names by their identifier
- PageableList messages;
-
- string? lastId = null;
- do
+ await foreach (PageResult page in client.GetMessagesAsync(threadId, new() { Order = ListOrder.NewestFirst }, cancellationToken).ConfigureAwait(false))
{
- messages = await client.GetMessagesAsync(threadId, limit: 100, ListSortOrder.Descending, after: lastId, null, cancellationToken).ConfigureAwait(false);
- foreach (ThreadMessage message in messages)
+ foreach (var message in page.Values)
{
+ AuthorRole role = new(message.Role.ToString());
+
string? assistantName = null;
if (!string.IsNullOrWhiteSpace(message.AssistantId) &&
!agentNames.TryGetValue(message.AssistantId, out assistantName))
@@ -94,20 +134,19 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
{
yield return content;
}
-
- lastId = message.Id;
}
}
- while (messages.HasMore);
}
///
/// Invoke the assistant on the specified thread.
+ /// In the enumeration returned by this method, a message is considered visible if it is intended to be displayed to the user.
+ /// An example of a non-visible message is function-content for functions that are automatically executed.
///
/// The assistant agent to interact with the thread.
/// The assistant client
/// The thread identifier
- /// Config to utilize when polling for run state.
+ /// Options to utilize for the invocation
/// The logger to utilize (might be agent or channel scoped)
/// The plugins and other state.
/// Optional arguments to pass to the agents's invocation, including any .
@@ -118,9 +157,9 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
///
public static async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(
OpenAIAssistantAgent agent,
- AssistantsClient client,
+ AssistantClient client,
string threadId,
- OpenAIAssistantConfiguration.PollingConfiguration pollingConfiguration,
+ OpenAIAssistantInvocationOptions? invocationOptions,
ILogger logger,
Kernel kernel,
KernelArguments? arguments,
@@ -131,19 +170,15 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
throw new KernelException($"Agent Failure - {nameof(OpenAIAssistantAgent)} agent is deleted: {agent.Id}.");
}
- ToolDefinition[]? tools = [.. agent.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name, FunctionDelimiter)))];
-
logger.LogOpenAIAssistantCreatingRun(nameof(InvokeAsync), threadId);
- CreateRunOptions options =
- new(agent.Id)
- {
- OverrideInstructions = agent.Instructions,
- OverrideTools = tools,
- };
+ ToolDefinition[]? tools = [.. agent.Tools, .. kernel.Plugins.SelectMany(p => p.Select(f => f.ToToolDefinition(p.Name)))];
+
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(agent.Definition, invocationOptions);
+
+ options.ToolsOverride.AddRange(tools);
- // Create run
- ThreadRun run = await client.CreateRunAsync(threadId, options, cancellationToken).ConfigureAwait(false);
+ ThreadRun run = await client.CreateRunAsync(threadId, agent.Id, options, cancellationToken).ConfigureAwait(false);
logger.LogOpenAIAssistantCreatedRun(nameof(InvokeAsync), run.Id, threadId);
@@ -154,7 +189,7 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
do
{
// Poll run and steps until actionable
- PageableList steps = await PollRunStatusAsync().ConfigureAwait(false);
+ await PollRunStatusAsync().ConfigureAwait(false);
// Is in terminal state?
if (s_terminalStatuses.Contains(run.Status))
@@ -162,13 +197,19 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
throw new KernelException($"Agent Failure - Run terminated: {run.Status} [{run.Id}]: {run.LastError?.Message ?? "Unknown"}");
}
+ List steps = [];
+ await foreach (var page in client.GetRunStepsAsync(run).ConfigureAwait(false))
+ {
+ steps.AddRange(page.Values);
+ };
+
// Is tool action required?
if (run.Status == RunStatus.RequiresAction)
{
logger.LogOpenAIAssistantProcessingRunSteps(nameof(InvokeAsync), run.Id, threadId);
// Execute functions in parallel and post results at once.
- FunctionCallContent[] activeFunctionSteps = steps.Data.SelectMany(step => ParseFunctionStep(agent, step)).ToArray();
+ FunctionCallContent[] activeFunctionSteps = steps.SelectMany(step => ParseFunctionStep(agent, step)).ToArray();
if (activeFunctionSteps.Length > 0)
{
// Emit function-call content
@@ -183,7 +224,7 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
// Process tool output
ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults);
- await client.SubmitToolOutputsToRunAsync(run, toolOutputs, cancellationToken).ConfigureAwait(false);
+ await client.SubmitToolOutputsToRunAsync(threadId, run.Id, toolOutputs, cancellationToken).ConfigureAwait(false);
}
logger.LogOpenAIAssistantProcessedRunSteps(nameof(InvokeAsync), activeFunctionSteps.Length, run.Id, threadId);
@@ -200,26 +241,24 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
int messageCount = 0;
foreach (RunStep completedStep in completedStepsToProcess)
{
- if (completedStep.Type.Equals(RunStepType.ToolCalls))
+ if (completedStep.Type == RunStepType.ToolCalls)
{
- RunStepToolCallDetails toolCallDetails = (RunStepToolCallDetails)completedStep.StepDetails;
-
- foreach (RunStepToolCall toolCall in toolCallDetails.ToolCalls)
+ foreach (RunStepToolCall toolCall in completedStep.Details.ToolCalls)
{
bool isVisible = false;
ChatMessageContent? content = null;
// Process code-interpreter content
- if (toolCall is RunStepCodeInterpreterToolCall toolCodeInterpreter)
+ if (toolCall.ToolKind == RunStepToolCallKind.CodeInterpreter)
{
- content = GenerateCodeInterpreterContent(agent.GetName(), toolCodeInterpreter);
+ content = GenerateCodeInterpreterContent(agent.GetName(), toolCall.CodeInterpreterInput);
isVisible = true;
}
// Process function result content
- else if (toolCall is RunStepFunctionToolCall toolFunction)
+ else if (toolCall.ToolKind == RunStepToolCallKind.Function)
{
- FunctionCallContent functionStep = functionSteps[toolFunction.Id]; // Function step always captured on invocation
- content = GenerateFunctionResultContent(agent.GetName(), functionStep, toolFunction.Output);
+ FunctionCallContent functionStep = functionSteps[toolCall.ToolCallId]; // Function step always captured on invocation
+ content = GenerateFunctionResultContent(agent.GetName(), functionStep, toolCall.FunctionOutput);
}
if (content is not null)
@@ -230,12 +269,10 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
}
}
}
- else if (completedStep.Type.Equals(RunStepType.MessageCreation))
+ else if (completedStep.Type == RunStepType.MessageCreation)
{
- RunStepMessageCreationDetails messageCreationDetails = (RunStepMessageCreationDetails)completedStep.StepDetails;
-
// Retrieve the message
- ThreadMessage? message = await RetrieveMessageAsync(messageCreationDetails, cancellationToken).ConfigureAwait(false);
+ ThreadMessage? message = await RetrieveMessageAsync(completedStep.Details.CreatedMessageId, cancellationToken).ConfigureAwait(false);
if (message is not null)
{
@@ -260,7 +297,7 @@ public static async IAsyncEnumerable GetMessagesAsync(Assist
logger.LogOpenAIAssistantCompletedRun(nameof(InvokeAsync), run.Id, threadId);
// Local function to assist in run polling (participates in method closure).
- async Task> PollRunStatusAsync()
+ async Task PollRunStatusAsync()
{
logger.LogOpenAIAssistantPollingRunStatus(nameof(PollRunStatusAsync), run.Id, threadId);
@@ -269,7 +306,7 @@ async Task> PollRunStatusAsync()
do
{
// Reduce polling frequency after a couple attempts
- await Task.Delay(count >= 2 ? pollingConfiguration.RunPollingInterval : pollingConfiguration.RunPollingBackoff, cancellationToken).ConfigureAwait(false);
+ await Task.Delay(agent.PollingOptions.GetPollingInterval(count), cancellationToken).ConfigureAwait(false);
++count;
#pragma warning disable CA1031 // Do not catch general exception types
@@ -286,39 +323,37 @@ async Task> PollRunStatusAsync()
while (s_pollingStatuses.Contains(run.Status));
logger.LogOpenAIAssistantPolledRunStatus(nameof(PollRunStatusAsync), run.Status, run.Id, threadId);
-
- return await client.GetRunStepsAsync(run, cancellationToken: cancellationToken).ConfigureAwait(false);
}
// Local function to capture kernel function state for further processing (participates in method closure).
IEnumerable ParseFunctionStep(OpenAIAssistantAgent agent, RunStep step)
{
- if (step.Status == RunStepStatus.InProgress && step.StepDetails is RunStepToolCallDetails callDetails)
+ if (step.Status == RunStepStatus.InProgress && step.Type == RunStepType.ToolCalls)
{
- foreach (RunStepFunctionToolCall toolCall in callDetails.ToolCalls.OfType())
+ foreach (RunStepToolCall toolCall in step.Details.ToolCalls)
{
- var nameParts = FunctionName.Parse(toolCall.Name, FunctionDelimiter);
+ var nameParts = FunctionName.Parse(toolCall.FunctionName);
KernelArguments functionArguments = [];
- if (!string.IsNullOrWhiteSpace(toolCall.Arguments))
+ if (!string.IsNullOrWhiteSpace(toolCall.FunctionArguments))
{
- Dictionary arguments = JsonSerializer.Deserialize>(toolCall.Arguments)!;
+ Dictionary arguments = JsonSerializer.Deserialize>(toolCall.FunctionArguments)!;
foreach (var argumentKvp in arguments)
{
functionArguments[argumentKvp.Key] = argumentKvp.Value.ToString();
}
}
- var content = new FunctionCallContent(nameParts.Name, nameParts.PluginName, toolCall.Id, functionArguments);
+ var content = new FunctionCallContent(nameParts.Name, nameParts.PluginName, toolCall.ToolCallId, functionArguments);
- functionSteps.Add(toolCall.Id, content);
+ functionSteps.Add(toolCall.ToolCallId, content);
yield return content;
}
}
}
- async Task RetrieveMessageAsync(RunStepMessageCreationDetails detail, CancellationToken cancellationToken)
+ async Task RetrieveMessageAsync(string messageId, CancellationToken cancellationToken)
{
ThreadMessage? message = null;
@@ -328,7 +363,7 @@ IEnumerable ParseFunctionStep(OpenAIAssistantAgent agent, R
{
try
{
- message = await client.GetMessageAsync(threadId, detail.MessageCreation.MessageId, cancellationToken).ConfigureAwait(false);
+ message = await client.GetMessageAsync(threadId, messageId, cancellationToken).ConfigureAwait(false);
}
catch (RequestFailedException exception)
{
@@ -340,7 +375,7 @@ IEnumerable ParseFunctionStep(OpenAIAssistantAgent agent, R
if (retry)
{
- await Task.Delay(pollingConfiguration.MessageSynchronizationDelay, cancellationToken).ConfigureAwait(false);
+ await Task.Delay(agent.PollingOptions.MessageSynchronizationDelay, cancellationToken).ConfigureAwait(false);
}
++count;
@@ -361,57 +396,58 @@ private static ChatMessageContent GenerateMessageContent(string? assistantName,
AuthorName = assistantName,
};
- foreach (MessageContent itemContent in message.ContentItems)
+ foreach (MessageContent itemContent in message.Content)
{
// Process text content
- if (itemContent is MessageTextContent contentMessage)
+ if (!string.IsNullOrEmpty(itemContent.Text))
{
- content.Items.Add(new TextContent(contentMessage.Text.Trim()));
+ content.Items.Add(new TextContent(itemContent.Text));
- foreach (MessageTextAnnotation annotation in contentMessage.Annotations)
+ foreach (TextAnnotation annotation in itemContent.TextAnnotations)
{
content.Items.Add(GenerateAnnotationContent(annotation));
}
}
// Process image content
- else if (itemContent is MessageImageFileContent contentImage)
+ else if (itemContent.ImageFileId != null)
{
- content.Items.Add(new FileReferenceContent(contentImage.FileId));
+ content.Items.Add(new FileReferenceContent(itemContent.ImageFileId));
}
}
return content;
}
- private static AnnotationContent GenerateAnnotationContent(MessageTextAnnotation annotation)
+ private static AnnotationContent GenerateAnnotationContent(TextAnnotation annotation)
{
string? fileId = null;
- if (annotation is MessageTextFileCitationAnnotation citationAnnotation)
+
+ if (!string.IsNullOrEmpty(annotation.OutputFileId))
{
- fileId = citationAnnotation.FileId;
+ fileId = annotation.OutputFileId;
}
- else if (annotation is MessageTextFilePathAnnotation pathAnnotation)
+ else if (!string.IsNullOrEmpty(annotation.InputFileId))
{
- fileId = pathAnnotation.FileId;
+ fileId = annotation.InputFileId;
}
return
new()
{
- Quote = annotation.Text,
+ Quote = annotation.TextToReplace,
StartIndex = annotation.StartIndex,
EndIndex = annotation.EndIndex,
FileId = fileId,
};
}
- private static ChatMessageContent GenerateCodeInterpreterContent(string agentName, RunStepCodeInterpreterToolCall contentCodeInterpreter)
+ private static ChatMessageContent GenerateCodeInterpreterContent(string agentName, string pythonCode)
{
return
new ChatMessageContent(
AuthorRole.Assistant,
[
- new TextContent(contentCodeInterpreter.Input)
+ new TextContent(pythonCode)
])
{
AuthorName = agentName,
diff --git a/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs b/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs
new file mode 100644
index 000000000000..6874e1d21755
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/Internal/AssistantToolResourcesFactory.cs
@@ -0,0 +1,51 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using OpenAI.Assistants;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+
+///
+/// Factory for creating definition.
+///
+///
+/// Improves testability.
+///
+internal static class AssistantToolResourcesFactory
+{
+ ///
+ /// Produces a definition based on the provided parameters.
+ ///
+ /// An optional vector-store-id for the 'file_search' tool
+ /// An optional list of file-identifiers for the 'code_interpreter' tool.
+ public static ToolResources? GenerateToolResources(string? vectorStoreId, IReadOnlyList? codeInterpreterFileIds)
+ {
+ bool hasVectorStore = !string.IsNullOrWhiteSpace(vectorStoreId);
+ bool hasCodeInterpreterFiles = (codeInterpreterFileIds?.Count ?? 0) > 0;
+
+ ToolResources? toolResources = null;
+
+ if (hasVectorStore || hasCodeInterpreterFiles)
+ {
+ toolResources =
+ new ToolResources()
+ {
+ FileSearch =
+ hasVectorStore ?
+ new FileSearchToolResources()
+ {
+ VectorStoreIds = [vectorStoreId!],
+ } :
+ null,
+ CodeInterpreter =
+ hasCodeInterpreterFiles ?
+ new CodeInterpreterToolResources()
+ {
+ FileIds = (IList)codeInterpreterFileIds!,
+ } :
+ null,
+ };
+ }
+
+ return toolResources;
+ }
+}
diff --git a/dotnet/src/Agents/OpenAI/Logging/AssistantThreadActionsLogMessages.cs b/dotnet/src/Agents/OpenAI/Logging/AssistantThreadActionsLogMessages.cs
index bc7c8d9919f0..3a39c314c5c3 100644
--- a/dotnet/src/Agents/OpenAI/Logging/AssistantThreadActionsLogMessages.cs
+++ b/dotnet/src/Agents/OpenAI/Logging/AssistantThreadActionsLogMessages.cs
@@ -1,7 +1,8 @@
// Copyright (c) Microsoft. All rights reserved.
using System.Diagnostics.CodeAnalysis;
-using Azure.AI.OpenAI.Assistants;
using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+using OpenAI.Assistants;
namespace Microsoft.SemanticKernel.Agents.OpenAI;
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
index 6746c6c50d9a..28c8dba9e3a8 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantAgent.cs
@@ -1,17 +1,16 @@
// Copyright (c) Microsoft. All rights reserved.
-using System;
using System.Collections.Generic;
+using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
+using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
-using Azure;
-using Azure.AI.OpenAI.Assistants;
-using Azure.Core;
-using Azure.Core.Pipeline;
using Microsoft.Extensions.Logging;
-using Microsoft.SemanticKernel.Agents.OpenAI.Azure;
-using Microsoft.SemanticKernel.Http;
+using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+using OpenAI;
+using OpenAI.Assistants;
+using OpenAI.Files;
namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -25,9 +24,12 @@ public sealed class OpenAIAssistantAgent : KernelAgent
///
public const string CodeInterpreterMetadataKey = "code";
+ internal const string OptionsMetadataKey = "__run_options";
+
+ private readonly OpenAIClientProvider _provider;
private readonly Assistant _assistant;
- private readonly AssistantsClient _client;
- private readonly OpenAIAssistantConfiguration _config;
+ private readonly AssistantClient _client;
+ private readonly string[] _channelKeys;
///
/// Optional arguments for the agent.
@@ -38,57 +40,55 @@ public sealed class OpenAIAssistantAgent : KernelAgent
public KernelArguments? Arguments { get; init; }
///
- /// A list of previously uploaded file IDs to attach to the assistant.
+ /// The assistant definition.
///
- public IReadOnlyList FileIds => this._assistant.FileIds;
+ public OpenAIAssistantDefinition Definition { get; private init; }
///
- /// A set of up to 16 key/value pairs that can be attached to an agent, used for
- /// storing additional information about that object in a structured format.Keys
- /// may be up to 64 characters in length and values may be up to 512 characters in length.
+ /// Set when the assistant has been deleted via .
+ /// An assistant removed by other means will result in an exception when invoked.
///
- public IReadOnlyDictionary Metadata => this._assistant.Metadata;
+ public bool IsDeleted { get; private set; }
///
- /// Expose predefined tools.
+ /// Defines polling behavior for run processing
///
- internal IReadOnlyList Tools => this._assistant.Tools;
+ public RunPollingOptions PollingOptions { get; } = new();
///
- /// Set when the assistant has been deleted via .
- /// An assistant removed by other means will result in an exception when invoked.
+ /// Expose predefined tools for run-processing.
///
- public bool IsDeleted { get; private set; }
+ internal IReadOnlyList Tools => this._assistant.Tools;
///
/// Define a new .
///
/// The containing services, plugins, and other state for use throughout the operation.
- /// Configuration for accessing the Assistants API service, such as the api-key.
+ /// OpenAI client provider for accessing the API service.
/// The assistant definition.
/// The to monitor for cancellation requests. The default is .
/// An instance
public static async Task CreateAsync(
Kernel kernel,
- OpenAIAssistantConfiguration config,
+ OpenAIClientProvider clientProvider,
OpenAIAssistantDefinition definition,
CancellationToken cancellationToken = default)
{
// Validate input
Verify.NotNull(kernel, nameof(kernel));
- Verify.NotNull(config, nameof(config));
+ Verify.NotNull(clientProvider, nameof(clientProvider));
Verify.NotNull(definition, nameof(definition));
// Create the client
- AssistantsClient client = CreateClient(config);
+ AssistantClient client = CreateClient(clientProvider);
// Create the assistant
AssistantCreationOptions assistantCreationOptions = CreateAssistantCreationOptions(definition);
- Assistant model = await client.CreateAssistantAsync(assistantCreationOptions, cancellationToken).ConfigureAwait(false);
+ Assistant model = await client.CreateAssistantAsync(definition.ModelId, assistantCreationOptions, cancellationToken).ConfigureAwait(false);
// Instantiate the agent
return
- new OpenAIAssistantAgent(client, model, config)
+ new OpenAIAssistantAgent(model, clientProvider, client)
{
Kernel = kernel,
};
@@ -97,79 +97,49 @@ public static async Task CreateAsync(
///
/// Retrieve a list of assistant definitions: .
///
- /// Configuration for accessing the Assistants API service, such as the api-key.
- /// The maximum number of assistant definitions to retrieve
- /// The identifier of the assistant beyond which to begin selection.
+ /// Configuration for accessing the API service.
/// The to monitor for cancellation requests. The default is .
/// An list of objects.
public static async IAsyncEnumerable ListDefinitionsAsync(
- OpenAIAssistantConfiguration config,
- int maxResults = 100,
- string? lastId = null,
+ OpenAIClientProvider provider,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
// Create the client
- AssistantsClient client = CreateClient(config);
-
- // Retrieve the assistants
- PageableList assistants;
+ AssistantClient client = CreateClient(provider);
- int resultCount = 0;
- do
+ // Query and enumerate assistant definitions
+ await foreach (var page in client.GetAssistantsAsync(new AssistantCollectionOptions() { Order = ListOrder.NewestFirst }, cancellationToken).ConfigureAwait(false))
{
- assistants = await client.GetAssistantsAsync(limit: Math.Min(maxResults, 100), ListSortOrder.Descending, after: lastId, cancellationToken: cancellationToken).ConfigureAwait(false);
- foreach (Assistant assistant in assistants)
+ foreach (Assistant model in page.Values)
{
- if (resultCount >= maxResults)
- {
- break;
- }
-
- resultCount++;
-
- yield return
- new()
- {
- Id = assistant.Id,
- Name = assistant.Name,
- Description = assistant.Description,
- Instructions = assistant.Instructions,
- EnableCodeInterpreter = assistant.Tools.Any(t => t is CodeInterpreterToolDefinition),
- EnableRetrieval = assistant.Tools.Any(t => t is RetrievalToolDefinition),
- FileIds = assistant.FileIds,
- Metadata = assistant.Metadata,
- ModelId = assistant.Model,
- };
-
- lastId = assistant.Id;
+ yield return CreateAssistantDefinition(model);
}
}
- while (assistants.HasMore && resultCount < maxResults);
}
///
/// Retrieve a by identifier.
///
/// The containing services, plugins, and other state for use throughout the operation.
- /// Configuration for accessing the Assistants API service, such as the api-key.
+ /// Configuration for accessing the API service.
/// The agent identifier
/// The to monitor for cancellation requests. The default is .
/// An instance
public static async Task RetrieveAsync(
Kernel kernel,
- OpenAIAssistantConfiguration config,
+ OpenAIClientProvider provider,
string id,
CancellationToken cancellationToken = default)
{
// Create the client
- AssistantsClient client = CreateClient(config);
+ AssistantClient client = CreateClient(provider);
// Retrieve the assistant
Assistant model = await client.GetAssistantAsync(id, cancellationToken).ConfigureAwait(false);
// Instantiate the agent
return
- new OpenAIAssistantAgent(client, model, config)
+ new OpenAIAssistantAgent(model, provider, client)
{
Kernel = kernel,
};
@@ -180,12 +150,17 @@ public static async Task RetrieveAsync(
///
/// The to monitor for cancellation requests. The default is .
/// The thread identifier
- public async Task CreateThreadAsync(CancellationToken cancellationToken = default)
- {
- AssistantThread thread = await this._client.CreateThreadAsync(cancellationToken).ConfigureAwait(false);
+ public Task CreateThreadAsync(CancellationToken cancellationToken = default)
+ => AssistantThreadActions.CreateThreadAsync(this._client, options: null, cancellationToken);
- return thread.Id;
- }
+ ///
+ /// Create a new assistant thread.
+ ///
+ /// The options for creating the thread
+ /// The to monitor for cancellation requests. The default is .
+ /// The thread identifier
+ public Task CreateThreadAsync(OpenAIThreadCreationOptions? options, CancellationToken cancellationToken = default)
+ => AssistantThreadActions.CreateThreadAsync(this._client, options, cancellationToken);
///
/// Create a new assistant thread.
@@ -203,6 +178,25 @@ public async Task DeleteThreadAsync(
return await this._client.DeleteThreadAsync(threadId, cancellationToken).ConfigureAwait(false);
}
+ ///
+ /// Uploads a file for the purpose of using with assistant.
+ ///
+ /// The content to upload
+ /// The name of the file
+ /// The to monitor for cancellation requests. The default is .
+ /// The file identifier
+ ///
+ /// Use the directly for more advanced file operations.
+ ///
+ public async Task UploadFileAsync(Stream stream, string name, CancellationToken cancellationToken = default)
+ {
+ FileClient client = this._provider.Client.GetFileClient();
+
+ OpenAIFileInfo fileInfo = await client.UploadFileAsync(stream, name, FileUploadPurpose.Assistants, cancellationToken).ConfigureAwait(false);
+
+ return fileInfo.Id;
+ }
+
///
/// Adds a message to the specified thread.
///
@@ -232,7 +226,7 @@ public IAsyncEnumerable GetThreadMessagesAsync(string thread
///
/// Delete the assistant definition.
///
- ///
+ /// The to monitor for cancellation requests. The default is .
/// True if assistant definition has been deleted
///
/// Assistant based agent will not be useable after deletion.
@@ -258,8 +252,28 @@ public async Task DeleteAsync(CancellationToken cancellationToken = defaul
///
/// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
///
+ public IAsyncEnumerable InvokeAsync(
+ string threadId,
+ KernelArguments? arguments = null,
+ Kernel? kernel = null,
+ CancellationToken cancellationToken = default)
+ => this.InvokeAsync(threadId, options: null, arguments, kernel, cancellationToken);
+
+ ///
+ /// Invoke the assistant on the specified thread.
+ ///
+ /// The thread identifier
+ /// Optional invocation options
+ /// Optional arguments to pass to the agent's invocation, including any .
+ /// The containing services, plugins, and other state for use by the agent.
+ /// The to monitor for cancellation requests. The default is .
+ /// Asynchronous enumeration of messages.
+ ///
+ /// The `arguments` parameter is not currently used by the agent, but is provided for future extensibility.
+ ///
public async IAsyncEnumerable InvokeAsync(
string threadId,
+ OpenAIAssistantInvocationOptions? options,
KernelArguments? arguments = null,
Kernel? kernel = null,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
@@ -269,7 +283,7 @@ public async IAsyncEnumerable InvokeAsync(
kernel ??= this.Kernel;
arguments ??= this.Arguments;
- await foreach ((bool isVisible, ChatMessageContent message) in AssistantThreadActions.InvokeAsync(this, this._client, threadId, this._config.Polling, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false))
+ await foreach ((bool isVisible, ChatMessageContent message) in AssistantThreadActions.InvokeAsync(this, this._client, threadId, options, this.Logger, kernel, arguments, cancellationToken).ConfigureAwait(false))
{
if (isVisible)
{
@@ -282,29 +296,11 @@ public async IAsyncEnumerable InvokeAsync(
protected override IEnumerable GetChannelKeys()
{
// Distinguish from other channel types.
- yield return typeof(AgentChannel).FullName!;
+ yield return typeof(OpenAIAssistantChannel).FullName!;
- // Distinguish between different Azure OpenAI endpoints or OpenAI services.
- yield return this._config.Endpoint ?? "openai";
-
- // Distinguish between different API versioning.
- if (this._config.Version.HasValue)
+ foreach (string key in this._channelKeys)
{
- yield return this._config.Version.ToString()!;
- }
-
- // Custom client receives dedicated channel.
- if (this._config.HttpClient is not null)
- {
- if (this._config.HttpClient.BaseAddress is not null)
- {
- yield return this._config.HttpClient.BaseAddress.AbsoluteUri;
- }
-
- foreach (string header in this._config.HttpClient.DefaultRequestHeaders.SelectMany(h => h.Value))
- {
- yield return header;
- }
+ yield return key;
}
}
@@ -313,10 +309,12 @@ protected override async Task CreateChannelAsync(CancellationToken
{
this.Logger.LogOpenAIAssistantAgentCreatingChannel(nameof(CreateChannelAsync), nameof(OpenAIAssistantChannel));
- AssistantThread thread = await this._client.CreateThreadAsync(cancellationToken).ConfigureAwait(false);
+ AssistantThread thread = await this._client.CreateThreadAsync(options: null, cancellationToken).ConfigureAwait(false);
+
+ this.Logger.LogInformation("[{MethodName}] Created assistant thread: {ThreadId}", nameof(CreateChannelAsync), thread.Id);
OpenAIAssistantChannel channel =
- new(this._client, thread.Id, this._config.Polling)
+ new(this._client, thread.Id)
{
Logger = this.LoggerFactory.CreateLogger()
};
@@ -338,13 +336,16 @@ internal void ThrowIfDeleted()
/// Initializes a new instance of the class.
///
private OpenAIAssistantAgent(
- AssistantsClient client,
Assistant model,
- OpenAIAssistantConfiguration config)
+ OpenAIClientProvider provider,
+ AssistantClient client)
{
+ this._provider = provider;
this._assistant = model;
- this._client = client;
- this._config = config;
+ this._client = provider.Client.GetAssistantClient();
+ this._channelKeys = provider.ConfigurationKeys.ToArray();
+
+ this.Definition = CreateAssistantDefinition(model);
this.Description = this._assistant.Description;
this.Id = this._assistant.Id;
@@ -352,64 +353,94 @@ private OpenAIAssistantAgent(
this.Instructions = this._assistant.Instructions;
}
+ private static OpenAIAssistantDefinition CreateAssistantDefinition(Assistant model)
+ {
+ OpenAIAssistantExecutionOptions? options = null;
+
+ if (model.Metadata.TryGetValue(OptionsMetadataKey, out string? optionsJson))
+ {
+ options = JsonSerializer.Deserialize(optionsJson);
+ }
+
+ IReadOnlyList? fileIds = (IReadOnlyList?)model.ToolResources?.CodeInterpreter?.FileIds;
+ string? vectorStoreId = model.ToolResources?.FileSearch?.VectorStoreIds?.SingleOrDefault();
+ bool enableJsonResponse = model.ResponseFormat is not null && model.ResponseFormat == AssistantResponseFormat.JsonObject;
+
+ return new(model.Model)
+ {
+ Id = model.Id,
+ Name = model.Name,
+ Description = model.Description,
+ Instructions = model.Instructions,
+ CodeInterpreterFileIds = fileIds,
+ EnableCodeInterpreter = model.Tools.Any(t => t is CodeInterpreterToolDefinition),
+ EnableFileSearch = model.Tools.Any(t => t is FileSearchToolDefinition),
+ Metadata = model.Metadata,
+ EnableJsonResponse = enableJsonResponse,
+ TopP = model.NucleusSamplingFactor,
+ Temperature = model.Temperature,
+ VectorStoreId = string.IsNullOrWhiteSpace(vectorStoreId) ? null : vectorStoreId,
+ ExecutionOptions = options,
+ };
+ }
+
private static AssistantCreationOptions CreateAssistantCreationOptions(OpenAIAssistantDefinition definition)
{
AssistantCreationOptions assistantCreationOptions =
- new(definition.ModelId)
+ new()
{
Description = definition.Description,
Instructions = definition.Instructions,
Name = definition.Name,
- Metadata = definition.Metadata?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value),
+ ToolResources =
+ AssistantToolResourcesFactory.GenerateToolResources(
+ definition.EnableFileSearch ? definition.VectorStoreId : null,
+ definition.EnableCodeInterpreter ? definition.CodeInterpreterFileIds : null),
+ ResponseFormat = definition.EnableJsonResponse ? AssistantResponseFormat.JsonObject : AssistantResponseFormat.Auto,
+ Temperature = definition.Temperature,
+ NucleusSamplingFactor = definition.TopP,
};
- assistantCreationOptions.FileIds.AddRange(definition.FileIds ?? []);
+ if (definition.Metadata != null)
+ {
+ foreach (KeyValuePair item in definition.Metadata)
+ {
+ assistantCreationOptions.Metadata[item.Key] = item.Value;
+ }
+ }
+
+ if (definition.ExecutionOptions != null)
+ {
+ string optionsJson = JsonSerializer.Serialize(definition.ExecutionOptions);
+ assistantCreationOptions.Metadata[OptionsMetadataKey] = optionsJson;
+ }
if (definition.EnableCodeInterpreter)
{
- assistantCreationOptions.Tools.Add(new CodeInterpreterToolDefinition());
+ assistantCreationOptions.Tools.Add(ToolDefinition.CreateCodeInterpreter());
}
- if (definition.EnableRetrieval)
+ if (definition.EnableFileSearch)
{
- assistantCreationOptions.Tools.Add(new RetrievalToolDefinition());
+ assistantCreationOptions.Tools.Add(ToolDefinition.CreateFileSearch());
}
return assistantCreationOptions;
}
- private static AssistantsClient CreateClient(OpenAIAssistantConfiguration config)
+ private static AssistantClient CreateClient(OpenAIClientProvider config)
{
- AssistantsClientOptions clientOptions = CreateClientOptions(config);
-
- // Inspect options
- if (!string.IsNullOrWhiteSpace(config.Endpoint))
- {
- // Create client configured for Azure OpenAI, if endpoint definition is present.
- return new AssistantsClient(new Uri(config.Endpoint), new AzureKeyCredential(config.ApiKey), clientOptions);
- }
-
- // Otherwise, create client configured for OpenAI.
- return new AssistantsClient(config.ApiKey, clientOptions);
+ return config.Client.GetAssistantClient();
}
- private static AssistantsClientOptions CreateClientOptions(OpenAIAssistantConfiguration config)
+ private static IEnumerable DefineChannelKeys(OpenAIClientProvider config)
{
- AssistantsClientOptions options =
- config.Version.HasValue ?
- new(config.Version.Value) :
- new();
-
- options.Diagnostics.ApplicationId = HttpHeaderConstant.Values.UserAgent;
- options.AddPolicy(new AddHeaderRequestPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(OpenAIAssistantAgent))), HttpPipelinePosition.PerCall);
+ // Distinguish from other channel types.
+ yield return typeof(AgentChannel).FullName!;
- if (config.HttpClient is not null)
+ foreach (string key in config.ConfigurationKeys)
{
- options.Transport = new HttpClientTransport(config.HttpClient);
- options.RetryPolicy = new RetryPolicy(maxRetries: 0); // Disable Azure SDK retry policy if and only if a custom HttpClient is provided.
- options.Retry.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable Azure SDK default timeout
+ yield return key;
}
-
- return options;
}
}
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
index 5b4600e64542..72fbb026b05a 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
@@ -2,17 +2,18 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
-using Azure.AI.OpenAI.Assistants;
+using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+using OpenAI.Assistants;
namespace Microsoft.SemanticKernel.Agents.OpenAI;
///
/// A specialization for use with .
///
-internal sealed class OpenAIAssistantChannel(AssistantsClient client, string threadId, OpenAIAssistantConfiguration.PollingConfiguration pollingConfiguration)
+internal sealed class OpenAIAssistantChannel(AssistantClient client, string threadId)
: AgentChannel
{
- private readonly AssistantsClient _client = client;
+ private readonly AssistantClient _client = client;
private readonly string _threadId = threadId;
///
@@ -31,7 +32,7 @@ protected override async Task ReceiveAsync(IEnumerable histo
{
agent.ThrowIfDeleted();
- return AssistantThreadActions.InvokeAsync(agent, this._client, this._threadId, pollingConfiguration, this.Logger, agent.Kernel, agent.Arguments, cancellationToken);
+ return AssistantThreadActions.InvokeAsync(agent, this._client, this._threadId, invocationOptions: null, this.Logger, agent.Kernel, agent.Arguments, cancellationToken);
}
///
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantConfiguration.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantConfiguration.cs
deleted file mode 100644
index aa037266e7d5..000000000000
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantConfiguration.cs
+++ /dev/null
@@ -1,91 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System;
-using System.Net.Http;
-using Azure.AI.OpenAI.Assistants;
-
-namespace Microsoft.SemanticKernel.Agents.OpenAI;
-
-///
-/// Configuration to target an OpenAI Assistant API.
-///
-public sealed class OpenAIAssistantConfiguration
-{
- ///
- /// The Assistants API Key.
- ///
- public string ApiKey { get; }
-
- ///
- /// An optional endpoint if targeting Azure OpenAI Assistants API.
- ///
- public string? Endpoint { get; }
-
- ///
- /// An optional API version override.
- ///
- public AssistantsClientOptions.ServiceVersion? Version { get; init; }
-
- ///
- /// Custom for HTTP requests.
- ///
- public HttpClient? HttpClient { get; init; }
-
- ///
- /// Defineds polling behavior for Assistant API requests.
- ///
- public PollingConfiguration Polling { get; } = new PollingConfiguration();
-
- ///
- /// Initializes a new instance of the class.
- ///
- /// The Assistants API Key
- /// An optional endpoint if targeting Azure OpenAI Assistants API
- public OpenAIAssistantConfiguration(string apiKey, string? endpoint = null)
- {
- Verify.NotNullOrWhiteSpace(apiKey);
- if (!string.IsNullOrWhiteSpace(endpoint))
- {
- // Only verify `endpoint` when provided (AzureOAI vs OpenAI)
- Verify.StartsWith(endpoint, "https://", "The Azure OpenAI endpoint must start with 'https://'");
- }
-
- this.ApiKey = apiKey;
- this.Endpoint = endpoint;
- }
-
- ///
- /// Configuration and defaults associated with polling behavior for Assistant API requests.
- ///
- public sealed class PollingConfiguration
- {
- ///
- /// The default polling interval when monitoring thread-run status.
- ///
- public static TimeSpan DefaultPollingInterval { get; } = TimeSpan.FromMilliseconds(500);
-
- ///
- /// The default back-off interval when monitoring thread-run status.
- ///
- public static TimeSpan DefaultPollingBackoff { get; } = TimeSpan.FromSeconds(1);
-
- ///
- /// The default polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
- ///
- public static TimeSpan DefaultMessageSynchronizationDelay { get; } = TimeSpan.FromMilliseconds(500);
-
- ///
- /// The polling interval when monitoring thread-run status.
- ///
- public TimeSpan RunPollingInterval { get; set; } = DefaultPollingInterval;
-
- ///
- /// The back-off interval when monitoring thread-run status.
- ///
- public TimeSpan RunPollingBackoff { get; set; } = DefaultPollingBackoff;
-
- ///
- /// The polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
- ///
- public TimeSpan MessageSynchronizationDelay { get; set; } = DefaultMessageSynchronizationDelay;
- }
-}
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs
index 3699e07ee1ed..7b7015aa3b4a 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantDefinition.cs
@@ -1,57 +1,112 @@
// Copyright (c) Microsoft. All rights reserved.
using System.Collections.Generic;
+using System.Text.Json.Serialization;
namespace Microsoft.SemanticKernel.Agents.OpenAI;
///
-/// The data associated with an assistant's definition.
+/// Defines an assistant.
///
public sealed class OpenAIAssistantDefinition
{
///
- /// Identifies the AI model (OpenAI) or deployment (AzureOAI) this agent targets.
+ /// Identifies the AI model targeted by the agent.
///
- public string? ModelId { get; init; }
+ public string ModelId { get; }
///
/// The description of the assistant.
///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Description { get; init; }
///
/// The assistant's unique id. (Ignored on create.)
///
- public string? Id { get; init; }
+ public string Id { get; init; } = string.Empty;
///
/// The system instructions for the assistant to use.
///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Instructions { get; init; }
///
/// The name of the assistant.
///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Name { get; init; }
+ ///
+ /// Optional file-ids made available to the code_interpreter tool, if enabled.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public IReadOnlyList? CodeInterpreterFileIds { get; init; }
+
///
/// Set if code-interpreter is enabled.
///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public bool EnableCodeInterpreter { get; init; }
///
- /// Set if retrieval is enabled.
+ /// Set if file-search is enabled.
///
- public bool EnableRetrieval { get; init; }
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
+ public bool EnableFileSearch { get; init; }
///
- /// A list of previously uploaded file IDs to attach to the assistant.
+ /// Set if json response-format is enabled.
///
- public IEnumerable? FileIds { get; init; }
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
+ public bool EnableJsonResponse { get; init; }
///
/// A set of up to 16 key/value pairs that can be attached to an agent, used for
/// storing additional information about that object in a structured format.Keys
/// may be up to 64 characters in length and values may be up to 512 characters in length.
///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public IReadOnlyDictionary? Metadata { get; init; }
+
+ ///
+ /// The sampling temperature to use, between 0 and 2.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public float? Temperature { get; init; }
+
+ ///
+ /// An alternative to sampling with temperature, called nucleus sampling, where the model
+ /// considers the results of the tokens with top_p probability mass.
+ /// So 0.1 means only the tokens comprising the top 10% probability mass are considered.
+ ///
+ ///
+ /// Recommended to set this or temperature but not both.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public float? TopP { get; init; }
+
+ ///
+ /// Requires file-search if specified.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? VectorStoreId { get; init; }
+
+ ///
+ /// Default execution options for each agent invocation.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public OpenAIAssistantExecutionOptions? ExecutionOptions { get; init; }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// The targeted model
+ [JsonConstructor]
+ public OpenAIAssistantDefinition(string modelId)
+ {
+ Verify.NotNullOrWhiteSpace(modelId);
+
+ this.ModelId = modelId;
+ }
}
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs
new file mode 100644
index 000000000000..074b92831c92
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs
@@ -0,0 +1,38 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Text.Json.Serialization;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+///
+/// Defines assistant execution options for each invocation.
+///
+///
+/// These options are persisted as a single entry of the assistant's metadata with key: "__run_options"
+///
+public sealed class OpenAIAssistantExecutionOptions
+{
+ ///
+ /// The maximum number of completion tokens that may be used over the course of the run.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public int? MaxCompletionTokens { get; init; }
+
+ ///
+ /// The maximum number of prompt tokens that may be used over the course of the run.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public int? MaxPromptTokens { get; init; }
+
+ ///
+ /// Enables parallel function calling during tool use. Enabled by default.
+ /// Use this property to disable.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public bool? ParallelToolCallsEnabled { get; init; }
+
+ ///
+ /// When set, the thread will be truncated to the N most recent messages in the thread.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public int? TruncationMessageCount { get; init; }
+}
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs
new file mode 100644
index 000000000000..0653c83a13e2
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs
@@ -0,0 +1,88 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Text.Json.Serialization;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+///
+/// Defines per invocation execution options that override the assistant definition.
+///
+///
+/// Not applicable to usage.
+///
+public sealed class OpenAIAssistantInvocationOptions
+{
+ ///
+ /// Override the AI model targeted by the agent.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? ModelName { get; init; }
+
+ ///
+ /// Set if code_interpreter tool is enabled.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
+ public bool EnableCodeInterpreter { get; init; }
+
+ ///
+ /// Set if file_search tool is enabled.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
+ public bool EnableFileSearch { get; init; }
+
+ ///
+ /// Set if json response-format is enabled.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public bool? EnableJsonResponse { get; init; }
+
+ ///
+ /// The maximum number of completion tokens that may be used over the course of the run.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public int? MaxCompletionTokens { get; init; }
+
+ ///
+ /// The maximum number of prompt tokens that may be used over the course of the run.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public int? MaxPromptTokens { get; init; }
+
+ ///
+ /// Enables parallel function calling during tool use. Enabled by default.
+ /// Use this property to disable.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public bool? ParallelToolCallsEnabled { get; init; }
+
+ ///
+ /// When set, the thread will be truncated to the N most recent messages in the thread.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public int? TruncationMessageCount { get; init; }
+
+ ///
+ /// The sampling temperature to use, between 0 and 2.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public float? Temperature { get; init; }
+
+ ///
+ /// An alternative to sampling with temperature, called nucleus sampling, where the model
+ /// considers the results of the tokens with top_p probability mass.
+ /// So 0.1 means only the tokens comprising the top 10% probability mass are considered.
+ ///
+ ///
+ /// Recommended to set this or temperature but not both.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public float? TopP { get; init; }
+
+ ///
+ /// A set of up to 16 key/value pairs that can be attached to an agent, used for
+ /// storing additional information about that object in a structured format. Keys
+ /// may be up to 64 characters in length and values may be up to 512 characters in length.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public IReadOnlyDictionary? Metadata { get; init; }
+}
diff --git a/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs b/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
new file mode 100644
index 000000000000..0b60b66fa84a
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/OpenAIClientProvider.cs
@@ -0,0 +1,172 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.ClientModel;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Linq;
+using System.Net.Http;
+using System.Threading;
+using Azure.AI.OpenAI;
+using Azure.Core;
+using Microsoft.SemanticKernel.Http;
+using OpenAI;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+///
+/// Provides an for use by .
+///
+public sealed class OpenAIClientProvider
+{
+ ///
+ /// Avoids an exception from OpenAI Client when a custom endpoint is provided without an API key.
+ ///
+ private const string SingleSpaceKey = " ";
+
+ ///
+ /// An active client instance.
+ ///
+ public OpenAIClient Client { get; }
+
+ ///
+ /// Configuration keys required for management.
+ ///
+ internal IReadOnlyList ConfigurationKeys { get; }
+
+ private OpenAIClientProvider(OpenAIClient client, IEnumerable keys)
+ {
+ this.Client = client;
+ this.ConfigurationKeys = keys.ToArray();
+ }
+
+ ///
+ /// Produce a based on .
+ ///
+ /// The API key
+ /// The service endpoint
+ /// Custom for HTTP requests.
+ public static OpenAIClientProvider ForAzureOpenAI(ApiKeyCredential apiKey, Uri endpoint, HttpClient? httpClient = null)
+ {
+ Verify.NotNull(apiKey, nameof(apiKey));
+ Verify.NotNull(endpoint, nameof(endpoint));
+
+ AzureOpenAIClientOptions clientOptions = CreateAzureClientOptions(httpClient);
+
+ return new(new AzureOpenAIClient(endpoint, apiKey!, clientOptions), CreateConfigurationKeys(endpoint, httpClient));
+ }
+
+ ///
+ /// Produce a based on .
+ ///
+ /// The credentials
+ /// The service endpoint
+ /// Custom for HTTP requests.
+ public static OpenAIClientProvider ForAzureOpenAI(TokenCredential credential, Uri endpoint, HttpClient? httpClient = null)
+ {
+ Verify.NotNull(credential, nameof(credential));
+ Verify.NotNull(endpoint, nameof(endpoint));
+
+ AzureOpenAIClientOptions clientOptions = CreateAzureClientOptions(httpClient);
+
+ return new(new AzureOpenAIClient(endpoint, credential, clientOptions), CreateConfigurationKeys(endpoint, httpClient));
+ }
+
+ ///
+ /// Produce a based on .
+ ///
+ /// An optional endpoint
+ /// Custom for HTTP requests.
+ public static OpenAIClientProvider ForOpenAI(Uri? endpoint = null, HttpClient? httpClient = null)
+ {
+ OpenAIClientOptions clientOptions = CreateOpenAIClientOptions(endpoint, httpClient);
+ return new(new OpenAIClient(SingleSpaceKey, clientOptions), CreateConfigurationKeys(endpoint, httpClient));
+ }
+
+ ///
+ /// Produce a based on .
+ ///
+ /// The API key
+ /// An optional endpoint
+ /// Custom for HTTP requests.
+ public static OpenAIClientProvider ForOpenAI(ApiKeyCredential apiKey, Uri? endpoint = null, HttpClient? httpClient = null)
+ {
+ OpenAIClientOptions clientOptions = CreateOpenAIClientOptions(endpoint, httpClient);
+ return new(new OpenAIClient(apiKey ?? SingleSpaceKey, clientOptions), CreateConfigurationKeys(endpoint, httpClient));
+ }
+
+ ///
+ /// Directly provide a client instance.
+ ///
+ public static OpenAIClientProvider FromClient(OpenAIClient client)
+ {
+ return new(client, [client.GetType().FullName!, client.GetHashCode().ToString()]);
+ }
+
+ private static AzureOpenAIClientOptions CreateAzureClientOptions(HttpClient? httpClient)
+ {
+ AzureOpenAIClientOptions options = new()
+ {
+ ApplicationId = HttpHeaderConstant.Values.UserAgent
+ };
+
+ ConfigureClientOptions(httpClient, options);
+
+ return options;
+ }
+
+ private static OpenAIClientOptions CreateOpenAIClientOptions(Uri? endpoint, HttpClient? httpClient)
+ {
+ OpenAIClientOptions options = new()
+ {
+ ApplicationId = HttpHeaderConstant.Values.UserAgent,
+ Endpoint = endpoint ?? httpClient?.BaseAddress,
+ };
+
+ ConfigureClientOptions(httpClient, options);
+
+ return options;
+ }
+
+ private static void ConfigureClientOptions(HttpClient? httpClient, ClientPipelineOptions options)
+ {
+ options.AddPolicy(CreateRequestHeaderPolicy(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(OpenAIAssistantAgent))), PipelinePosition.PerCall);
+
+ if (httpClient is not null)
+ {
+ options.Transport = new HttpClientPipelineTransport(httpClient);
+ options.RetryPolicy = new ClientRetryPolicy(maxRetries: 0); // Disable retry policy if and only if a custom HttpClient is provided.
+ options.NetworkTimeout = Timeout.InfiniteTimeSpan; // Disable default timeout
+ }
+ }
+
+ private static GenericActionPipelinePolicy CreateRequestHeaderPolicy(string headerName, string headerValue)
+ =>
+ new((message) =>
+ {
+ if (message?.Request?.Headers?.TryGetValue(headerName, out string? _) == false)
+ {
+ message.Request.Headers.Set(headerName, headerValue);
+ }
+ });
+
+ private static IEnumerable CreateConfigurationKeys(Uri? endpoint, HttpClient? httpClient)
+ {
+ if (endpoint != null)
+ {
+ yield return endpoint.ToString();
+ }
+
+ if (httpClient is not null)
+ {
+ if (httpClient.BaseAddress is not null)
+ {
+ yield return httpClient.BaseAddress.AbsoluteUri;
+ }
+
+ foreach (string header in httpClient.DefaultRequestHeaders.SelectMany(h => h.Value))
+ {
+ yield return header;
+ }
+ }
+ }
+}
diff --git a/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs b/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs
new file mode 100644
index 000000000000..3f39c43d03dc
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/OpenAIThreadCreationOptions.cs
@@ -0,0 +1,37 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Text.Json.Serialization;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+///
+/// Thread creation options.
+///
+public sealed class OpenAIThreadCreationOptions
+{
+ ///
+ /// Optional file-ids made available to the code_interpreter tool, if enabled.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public IReadOnlyList? CodeInterpreterFileIds { get; init; }
+
+ ///
+ /// Optional messages to initialize the thread with.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public IReadOnlyList? Messages { get; init; }
+
+ ///
+ /// Enables file-search if specified.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? VectorStoreId { get; init; }
+
+ ///
+ /// A set of up to 16 key/value pairs that can be attached to an agent, used for
+ /// storing additional information about that object in a structured format. Keys
+ /// may be up to 64 characters in length and values may be up to 512 characters in length.
+ ///
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public IReadOnlyDictionary? Metadata { get; init; }
+}
diff --git a/dotnet/src/Agents/OpenAI/RunPollingOptions.cs b/dotnet/src/Agents/OpenAI/RunPollingOptions.cs
new file mode 100644
index 000000000000..756ba689131c
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/RunPollingOptions.cs
@@ -0,0 +1,57 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+///
+/// Configuration and defaults associated with polling behavior for Assistant API run processing.
+///
+public sealed class RunPollingOptions
+{
+ ///
+ /// The default polling interval when monitoring thread-run status.
+ ///
+ public static TimeSpan DefaultPollingInterval { get; } = TimeSpan.FromMilliseconds(500);
+
+ ///
+ /// The default back-off interval when monitoring thread-run status.
+ ///
+ public static TimeSpan DefaultPollingBackoff { get; } = TimeSpan.FromSeconds(1);
+
+ ///
+ /// The default number of polling iterations before using .
+ ///
+ public static int DefaultPollingBackoffThreshold { get; } = 2;
+
+ ///
+ /// The default polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
+ ///
+ public static TimeSpan DefaultMessageSynchronizationDelay { get; } = TimeSpan.FromMilliseconds(500);
+
+ ///
+ /// The polling interval when monitoring thread-run status.
+ ///
+ public TimeSpan RunPollingInterval { get; set; } = DefaultPollingInterval;
+
+ ///
+ /// The back-off interval when monitoring thread-run status.
+ ///
+ public TimeSpan RunPollingBackoff { get; set; } = DefaultPollingBackoff;
+
+ ///
+ /// The number of polling iterations before using .
+ ///
+ public int RunPollingBackoffThreshold { get; set; } = DefaultPollingBackoffThreshold;
+
+ ///
+ /// The polling delay when retrying message retrieval due to a 404/NotFound from synchronization lag.
+ ///
+ public TimeSpan MessageSynchronizationDelay { get; set; } = DefaultMessageSynchronizationDelay;
+
+ ///
+ /// Gets the polling interval for the specified iteration count.
+ ///
+ /// The number of polling iterations already attempted
+ public TimeSpan GetPollingInterval(int iterationCount) =>
+ iterationCount > this.RunPollingBackoffThreshold ? this.RunPollingBackoff : this.RunPollingInterval;
+}
diff --git a/dotnet/src/Agents/UnitTests/AgentChannelTests.cs b/dotnet/src/Agents/UnitTests/AgentChannelTests.cs
index 50aa328ebc67..84558e002b4f 100644
--- a/dotnet/src/Agents/UnitTests/AgentChannelTests.cs
+++ b/dotnet/src/Agents/UnitTests/AgentChannelTests.cs
@@ -23,20 +23,26 @@ public class AgentChannelTests
[Fact]
public async Task VerifyAgentChannelUpcastAsync()
{
+ // Arrange
TestChannel channel = new();
+ // Assert
Assert.Equal(0, channel.InvokeCount);
- var messages = channel.InvokeAgentAsync(new TestAgent()).ToArrayAsync();
+ // Act
+ var messages = channel.InvokeAgentAsync(new MockAgent()).ToArrayAsync();
+ // Assert
Assert.Equal(1, channel.InvokeCount);
+ // Act
await Assert.ThrowsAsync(() => channel.InvokeAgentAsync(new NextAgent()).ToArrayAsync().AsTask());
+ // Assert
Assert.Equal(1, channel.InvokeCount);
}
///
/// Not using mock as the goal here is to provide entrypoint to protected method.
///
- private sealed class TestChannel : AgentChannel
+ private sealed class TestChannel : AgentChannel
{
public int InvokeCount { get; private set; }
@@ -44,7 +50,7 @@ private sealed class TestChannel : AgentChannel
=> base.InvokeAsync(agent, cancellationToken);
#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously
- protected internal override async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(TestAgent agent, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ protected internal override async IAsyncEnumerable<(bool IsVisible, ChatMessageContent Message)> InvokeAsync(MockAgent agent, [EnumeratorCancellation] CancellationToken cancellationToken = default)
#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
{
this.InvokeCount++;
@@ -68,18 +74,5 @@ protected internal override Task ResetAsync(CancellationToken cancellationToken
}
}
- private sealed class NextAgent : TestAgent;
-
- private class TestAgent : KernelAgent
- {
- protected internal override Task CreateChannelAsync(CancellationToken cancellationToken)
- {
- throw new NotImplementedException();
- }
-
- protected internal override IEnumerable GetChannelKeys()
- {
- throw new NotImplementedException();
- }
- }
+ private sealed class NextAgent : MockAgent;
}
diff --git a/dotnet/src/Agents/UnitTests/AgentChatTests.cs b/dotnet/src/Agents/UnitTests/AgentChatTests.cs
index fc295e2b5550..fe6af0b3aee6 100644
--- a/dotnet/src/Agents/UnitTests/AgentChatTests.cs
+++ b/dotnet/src/Agents/UnitTests/AgentChatTests.cs
@@ -3,9 +3,11 @@
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
+using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.ChatCompletion;
+using Moq;
using Xunit;
namespace SemanticKernel.Agents.UnitTests;
@@ -21,36 +23,36 @@ public class AgentChatTests
[Fact]
public async Task VerifyAgentChatLifecycleAsync()
{
- // Create chat
+ // Arrange: Create chat
TestChat chat = new();
- // Verify initial state
+ // Assert: Verify initial state
Assert.False(chat.IsActive);
await this.VerifyHistoryAsync(expectedCount: 0, chat.GetChatMessagesAsync()); // Primary history
await this.VerifyHistoryAsync(expectedCount: 0, chat.GetChatMessagesAsync(chat.Agent)); // Agent history
- // Inject history
+ // Act: Inject history
chat.AddChatMessages([new ChatMessageContent(AuthorRole.User, "More")]);
chat.AddChatMessages([new ChatMessageContent(AuthorRole.User, "And then some")]);
- // Verify updated history
+ // Assert: Verify updated history
await this.VerifyHistoryAsync(expectedCount: 2, chat.GetChatMessagesAsync()); // Primary history
await this.VerifyHistoryAsync(expectedCount: 0, chat.GetChatMessagesAsync(chat.Agent)); // Agent hasn't joined
- // Invoke with input & verify (agent joins chat)
+ // Act: Invoke with input & verify (agent joins chat)
chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, "hi"));
await chat.InvokeAsync().ToArrayAsync();
- Assert.Equal(1, chat.Agent.InvokeCount);
- // Verify updated history
+ // Assert: Verify updated history
+ Assert.Equal(1, chat.Agent.InvokeCount);
await this.VerifyHistoryAsync(expectedCount: 4, chat.GetChatMessagesAsync()); // Primary history
await this.VerifyHistoryAsync(expectedCount: 4, chat.GetChatMessagesAsync(chat.Agent)); // Agent history
- // Invoke without input & verify
+ // Act: Invoke without input
await chat.InvokeAsync().ToArrayAsync();
- Assert.Equal(2, chat.Agent.InvokeCount);
- // Verify final history
+ // Assert: Verify final history
+ Assert.Equal(2, chat.Agent.InvokeCount);
await this.VerifyHistoryAsync(expectedCount: 5, chat.GetChatMessagesAsync()); // Primary history
await this.VerifyHistoryAsync(expectedCount: 5, chat.GetChatMessagesAsync(chat.Agent)); // Agent history
@@ -63,19 +65,46 @@ public async Task VerifyAgentChatLifecycleAsync()
await this.VerifyHistoryAsync(expectedCount: 0, chat.GetChatMessagesAsync(chat.Agent)); // Agent history
}
+ ///
+ /// Verify an exception is thrown for a system message.
+ ///
+ [Fact]
+ public void VerifyAgentChatRejectsSystemMessage()
+ {
+ // Arrange: Create chat
+ TestChat chat = new() { LoggerFactory = new Mock().Object };
+
+ // Act and Assert: Verify system message not accepted
+ Assert.Throws(() => chat.AddChatMessage(new ChatMessageContent(AuthorRole.System, "hi")));
+ }
+
+ ///
+ /// Verify an exception is thrown if invoked while already active.
+ ///
+ [Fact]
+ public async Task VerifyAgentChatThrowsWhenActiveAsync()
+ {
+ // Arrange: Create chat
+ TestChat chat = new();
+
+ // Act and Assert: Verify exception thrown when invoking while the chat is already active
+ await Assert.ThrowsAsync(() => chat.InvalidInvokeAsync().ToArrayAsync().AsTask());
+ }
+
///
/// Verify the management of instances as they join .
///
[Fact(Skip = "Not 100% reliable for github workflows, but useful for dev testing.")]
public async Task VerifyGroupAgentChatConcurrencyAsync()
{
+ // Arrange
TestChat chat = new();
Task[] tasks;
int isActive = 0;
- // Queue concurrent tasks
+ // Act: Queue concurrent tasks
object syncObject = new();
lock (syncObject)
{
@@ -97,7 +126,7 @@ public async Task VerifyGroupAgentChatConcurrencyAsync()
await Task.Yield();
- // Verify failure
+ // Assert: Verify failure
await Assert.ThrowsAsync(() => Task.WhenAll(tasks));
async Task SynchronizedInvokeAsync()
@@ -127,5 +156,12 @@ private sealed class TestChat : AgentChat
public override IAsyncEnumerable InvokeAsync(
CancellationToken cancellationToken = default) =>
this.InvokeAgentAsync(this.Agent, cancellationToken);
+
+ public IAsyncEnumerable InvalidInvokeAsync(
+ CancellationToken cancellationToken = default)
+ {
+ this.SetActivityOrThrow();
+ return this.InvokeAgentAsync(this.Agent, cancellationToken);
+ }
}
}
diff --git a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj
index d46a4ee0cd1e..6b9fea49fde2 100644
--- a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj
+++ b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj
@@ -8,7 +8,7 @@
true
false
12
- $(NoWarn);CA2007,CA1812,CA1861,CA1063,VSTHRD111,SKEXP0001,SKEXP0050,SKEXP0110
+ $(NoWarn);CA2007,CA1812,CA1861,CA1063,VSTHRD111,SKEXP0001,SKEXP0050,SKEXP0110;OPENAI001
@@ -32,9 +32,9 @@
+
-
diff --git a/dotnet/src/Agents/UnitTests/AggregatorAgentTests.cs b/dotnet/src/Agents/UnitTests/AggregatorAgentTests.cs
index 1a607ea7e6c7..e6668c7ea568 100644
--- a/dotnet/src/Agents/UnitTests/AggregatorAgentTests.cs
+++ b/dotnet/src/Agents/UnitTests/AggregatorAgentTests.cs
@@ -21,6 +21,7 @@ public class AggregatorAgentTests
[InlineData(AggregatorMode.Flat, 2)]
public async Task VerifyAggregatorAgentUsageAsync(AggregatorMode mode, int modeOffset)
{
+ // Arrange
Agent agent1 = CreateMockAgent();
Agent agent2 = CreateMockAgent();
Agent agent3 = CreateMockAgent();
@@ -44,38 +45,57 @@ public async Task VerifyAggregatorAgentUsageAsync(AggregatorMode mode, int modeO
// Add message to outer chat (no agent has joined)
uberChat.AddChatMessage(new ChatMessageContent(AuthorRole.User, "test uber"));
+ // Act
var messages = await uberChat.GetChatMessagesAsync().ToArrayAsync();
+ // Assert
Assert.Single(messages);
+ // Act
messages = await uberChat.GetChatMessagesAsync(uberAgent).ToArrayAsync();
+ // Assert
Assert.Empty(messages); // Agent hasn't joined chat, no broadcast
+ // Act
messages = await groupChat.GetChatMessagesAsync().ToArrayAsync();
+ // Assert
Assert.Empty(messages); // Agent hasn't joined chat, no broadcast
- // Add message to inner chat (not visible to parent)
+ // Arrange: Add message to inner chat (not visible to parent)
groupChat.AddChatMessage(new ChatMessageContent(AuthorRole.User, "test inner"));
+ // Act
messages = await uberChat.GetChatMessagesAsync().ToArrayAsync();
+ // Assert
Assert.Single(messages);
+ // Act
messages = await uberChat.GetChatMessagesAsync(uberAgent).ToArrayAsync();
+ // Assert
Assert.Empty(messages); // Agent still hasn't joined chat
+ // Act
messages = await groupChat.GetChatMessagesAsync().ToArrayAsync();
+ // Assert
Assert.Single(messages);
- // Invoke outer chat (outer chat captures final inner message)
+ // Act: Invoke outer chat (outer chat captures final inner message)
messages = await uberChat.InvokeAsync(uberAgent).ToArrayAsync();
+ // Assert
Assert.Equal(1 + modeOffset, messages.Length); // New messages generated from inner chat
+ // Act
messages = await uberChat.GetChatMessagesAsync().ToArrayAsync();
+ // Assert
Assert.Equal(2 + modeOffset, messages.Length); // Total messages on uber chat
+ // Act
messages = await groupChat.GetChatMessagesAsync().ToArrayAsync();
+ // Assert
Assert.Equal(5, messages.Length); // Total messages on inner chat once synchronized
+ // Act
messages = await uberChat.GetChatMessagesAsync(uberAgent).ToArrayAsync();
+ // Assert
Assert.Equal(5, messages.Length); // Total messages on inner chat once synchronized (agent equivalent)
}
diff --git a/dotnet/src/Agents/UnitTests/Core/AgentGroupChatTests.cs b/dotnet/src/Agents/UnitTests/Core/AgentGroupChatTests.cs
index 7c3267e3ad73..1c417a9e02ad 100644
--- a/dotnet/src/Agents/UnitTests/Core/AgentGroupChatTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/AgentGroupChatTests.cs
@@ -23,12 +23,18 @@ public class AgentGroupChatTests
[Fact]
public void VerifyGroupAgentChatDefaultState()
{
+ // Arrange
AgentGroupChat chat = new();
+
+ // Assert
Assert.Empty(chat.Agents);
Assert.NotNull(chat.ExecutionSettings);
Assert.False(chat.IsComplete);
+ // Act
chat.IsComplete = true;
+
+ // Assert
Assert.True(chat.IsComplete);
}
@@ -38,18 +44,25 @@ public void VerifyGroupAgentChatDefaultState()
[Fact]
public async Task VerifyGroupAgentChatAgentMembershipAsync()
{
+ // Arrange
Agent agent1 = CreateMockAgent();
Agent agent2 = CreateMockAgent();
Agent agent3 = CreateMockAgent();
Agent agent4 = CreateMockAgent();
AgentGroupChat chat = new(agent1, agent2);
+
+ // Assert
Assert.Equal(2, chat.Agents.Count);
+ // Act
chat.AddAgent(agent3);
+ // Assert
Assert.Equal(3, chat.Agents.Count);
+ // Act
ChatMessageContent[] messages = await chat.InvokeAsync(agent4).ToArrayAsync();
+ // Assert
Assert.Equal(4, chat.Agents.Count);
}
@@ -59,6 +72,7 @@ public async Task VerifyGroupAgentChatAgentMembershipAsync()
[Fact]
public async Task VerifyGroupAgentChatMultiTurnAsync()
{
+ // Arrange
Agent agent1 = CreateMockAgent();
Agent agent2 = CreateMockAgent();
Agent agent3 = CreateMockAgent();
@@ -78,10 +92,14 @@ public async Task VerifyGroupAgentChatMultiTurnAsync()
IsComplete = true
};
+ // Act and Assert
await Assert.ThrowsAsync(() => chat.InvokeAsync(CancellationToken.None).ToArrayAsync().AsTask());
+ // Act
chat.ExecutionSettings.TerminationStrategy.AutomaticReset = true;
var messages = await chat.InvokeAsync(CancellationToken.None).ToArrayAsync();
+
+ // Assert
Assert.Equal(9, messages.Length);
Assert.False(chat.IsComplete);
@@ -108,6 +126,7 @@ public async Task VerifyGroupAgentChatMultiTurnAsync()
[Fact]
public async Task VerifyGroupAgentChatFailedSelectionAsync()
{
+ // Arrange
AgentGroupChat chat = Create3AgentChat();
chat.ExecutionSettings =
@@ -125,6 +144,7 @@ public async Task VerifyGroupAgentChatFailedSelectionAsync()
// Remove max-limit in order to isolate the target behavior.
chat.ExecutionSettings.TerminationStrategy.MaximumIterations = int.MaxValue;
+ // Act and Assert
await Assert.ThrowsAsync(() => chat.InvokeAsync().ToArrayAsync().AsTask());
}
@@ -134,6 +154,7 @@ public async Task VerifyGroupAgentChatFailedSelectionAsync()
[Fact]
public async Task VerifyGroupAgentChatMultiTurnTerminationAsync()
{
+ // Arrange
AgentGroupChat chat = Create3AgentChat();
chat.ExecutionSettings =
@@ -147,7 +168,10 @@ public async Task VerifyGroupAgentChatMultiTurnTerminationAsync()
}
};
+ // Act
var messages = await chat.InvokeAsync(CancellationToken.None).ToArrayAsync();
+
+ // Assert
Assert.Single(messages);
Assert.True(chat.IsComplete);
}
@@ -158,6 +182,7 @@ public async Task VerifyGroupAgentChatMultiTurnTerminationAsync()
[Fact]
public async Task VerifyGroupAgentChatDiscreteTerminationAsync()
{
+ // Arrange
Agent agent1 = CreateMockAgent();
AgentGroupChat chat =
@@ -175,7 +200,10 @@ public async Task VerifyGroupAgentChatDiscreteTerminationAsync()
}
};
+ // Act
var messages = await chat.InvokeAsync(agent1).ToArrayAsync();
+
+ // Assert
Assert.Single(messages);
Assert.True(chat.IsComplete);
}
diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/AgentGroupChatSettingsTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/AgentGroupChatSettingsTests.cs
index d17391ee24be..ecb5cd6eee33 100644
--- a/dotnet/src/Agents/UnitTests/Core/Chat/AgentGroupChatSettingsTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/Chat/AgentGroupChatSettingsTests.cs
@@ -16,7 +16,10 @@ public class AgentGroupChatSettingsTests
[Fact]
public void VerifyChatExecutionSettingsDefault()
{
+ // Arrange
AgentGroupChatSettings settings = new();
+
+ // Assert
Assert.IsType(settings.TerminationStrategy);
Assert.Equal(1, settings.TerminationStrategy.MaximumIterations);
Assert.IsType(settings.SelectionStrategy);
@@ -28,6 +31,7 @@ public void VerifyChatExecutionSettingsDefault()
[Fact]
public void VerifyChatExecutionContinuationStrategyDefault()
{
+ // Arrange
Mock strategyMock = new();
AgentGroupChatSettings settings =
new()
@@ -35,6 +39,7 @@ public void VerifyChatExecutionContinuationStrategyDefault()
TerminationStrategy = strategyMock.Object
};
+ // Assert
Assert.Equal(strategyMock.Object, settings.TerminationStrategy);
}
@@ -44,6 +49,7 @@ public void VerifyChatExecutionContinuationStrategyDefault()
[Fact]
public void VerifyChatExecutionSelectionStrategyDefault()
{
+ // Arrange
Mock strategyMock = new();
AgentGroupChatSettings settings =
new()
@@ -51,6 +57,7 @@ public void VerifyChatExecutionSelectionStrategyDefault()
SelectionStrategy = strategyMock.Object
};
+ // Assert
Assert.NotNull(settings.SelectionStrategy);
Assert.Equal(strategyMock.Object, settings.SelectionStrategy);
}
diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/AggregatorTerminationStrategyTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/AggregatorTerminationStrategyTests.cs
index 6ad6fd75b18f..5af211c6cdf1 100644
--- a/dotnet/src/Agents/UnitTests/Core/Chat/AggregatorTerminationStrategyTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/Chat/AggregatorTerminationStrategyTests.cs
@@ -6,7 +6,6 @@
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.Chat;
-using Moq;
using Xunit;
namespace SemanticKernel.Agents.UnitTests.Core.Chat;
@@ -22,7 +21,10 @@ public class AggregatorTerminationStrategyTests
[Fact]
public void VerifyAggregateTerminationStrategyInitialState()
{
+ // Arrange
AggregatorTerminationStrategy strategy = new();
+
+ // Assert
Assert.Equal(AggregateTerminationCondition.All, strategy.Condition);
}
@@ -32,14 +34,16 @@ public void VerifyAggregateTerminationStrategyInitialState()
[Fact]
public async Task VerifyAggregateTerminationStrategyAnyAsync()
{
+ // Arrange
TerminationStrategy strategyMockTrue = new MockTerminationStrategy(terminationResult: true);
TerminationStrategy strategyMockFalse = new MockTerminationStrategy(terminationResult: false);
- Mock agentMock = new();
+ MockAgent agentMock = new();
+ // Act and Assert
await VerifyResultAsync(
expectedResult: true,
- agentMock.Object,
+ agentMock,
new(strategyMockTrue, strategyMockFalse)
{
Condition = AggregateTerminationCondition.Any,
@@ -47,7 +51,7 @@ await VerifyResultAsync(
await VerifyResultAsync(
expectedResult: false,
- agentMock.Object,
+ agentMock,
new(strategyMockFalse, strategyMockFalse)
{
Condition = AggregateTerminationCondition.Any,
@@ -55,7 +59,7 @@ await VerifyResultAsync(
await VerifyResultAsync(
expectedResult: true,
- agentMock.Object,
+ agentMock,
new(strategyMockTrue, strategyMockTrue)
{
Condition = AggregateTerminationCondition.Any,
@@ -68,14 +72,16 @@ await VerifyResultAsync(
[Fact]
public async Task VerifyAggregateTerminationStrategyAllAsync()
{
+ // Arrange
TerminationStrategy strategyMockTrue = new MockTerminationStrategy(terminationResult: true);
TerminationStrategy strategyMockFalse = new MockTerminationStrategy(terminationResult: false);
- Mock agentMock = new();
+ MockAgent agentMock = new();
+ // Act and Assert
await VerifyResultAsync(
expectedResult: false,
- agentMock.Object,
+ agentMock,
new(strategyMockTrue, strategyMockFalse)
{
Condition = AggregateTerminationCondition.All,
@@ -83,7 +89,7 @@ await VerifyResultAsync(
await VerifyResultAsync(
expectedResult: false,
- agentMock.Object,
+ agentMock,
new(strategyMockFalse, strategyMockFalse)
{
Condition = AggregateTerminationCondition.All,
@@ -91,7 +97,7 @@ await VerifyResultAsync(
await VerifyResultAsync(
expectedResult: true,
- agentMock.Object,
+ agentMock,
new(strategyMockTrue, strategyMockTrue)
{
Condition = AggregateTerminationCondition.All,
@@ -104,34 +110,39 @@ await VerifyResultAsync(
[Fact]
public async Task VerifyAggregateTerminationStrategyAgentAsync()
{
+ // Arrange
TerminationStrategy strategyMockTrue = new MockTerminationStrategy(terminationResult: true);
TerminationStrategy strategyMockFalse = new MockTerminationStrategy(terminationResult: false);
- Mock agentMockA = new();
- Mock agentMockB = new();
+ MockAgent agentMockA = new();
+ MockAgent agentMockB = new();
+ // Act and Assert
await VerifyResultAsync(
expectedResult: false,
- agentMockB.Object,
+ agentMockB,
new(strategyMockTrue, strategyMockTrue)
{
- Agents = [agentMockA.Object],
+ Agents = [agentMockA],
Condition = AggregateTerminationCondition.All,
});
await VerifyResultAsync(
expectedResult: true,
- agentMockB.Object,
+ agentMockB,
new(strategyMockTrue, strategyMockTrue)
{
- Agents = [agentMockB.Object],
+ Agents = [agentMockB],
Condition = AggregateTerminationCondition.All,
});
}
private static async Task VerifyResultAsync(bool expectedResult, Agent agent, AggregatorTerminationStrategy strategyRoot)
{
+ // Act
var result = await strategyRoot.ShouldTerminateAsync(agent, []);
+
+ // Assert
Assert.Equal(expectedResult, result);
}
diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionSelectionStrategyTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionSelectionStrategyTests.cs
index 275ef0e0bf5e..a9f1d461ed85 100644
--- a/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionSelectionStrategyTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionSelectionStrategyTests.cs
@@ -5,7 +5,6 @@
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.Chat;
using Microsoft.SemanticKernel.Connectors.OpenAI;
-using Moq;
using Xunit;
namespace SemanticKernel.Agents.UnitTests.Core.Chat;
@@ -21,8 +20,9 @@ public class KernelFunctionSelectionStrategyTests
[Fact]
public async Task VerifyKernelFunctionSelectionStrategyDefaultsAsync()
{
- Mock mockAgent = new();
- KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(mockAgent.Object.Id));
+ // Arrange
+ MockAgent mockAgent = new();
+ KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(mockAgent.Id));
KernelFunctionSelectionStrategy strategy =
new(plugin.Single(), new())
@@ -32,16 +32,40 @@ public async Task VerifyKernelFunctionSelectionStrategyDefaultsAsync()
ResultParser = (result) => result.GetValue() ?? string.Empty,
};
+ // Assert
Assert.Null(strategy.Arguments);
Assert.NotNull(strategy.Kernel);
Assert.NotNull(strategy.ResultParser);
Assert.Equal("_a_", strategy.AgentsVariableName);
Assert.Equal("_h_", strategy.HistoryVariableName);
- Agent nextAgent = await strategy.NextAsync([mockAgent.Object], []);
+ // Act
+ Agent nextAgent = await strategy.NextAsync([mockAgent], []);
+ // Assert
Assert.NotNull(nextAgent);
- Assert.Equal(mockAgent.Object, nextAgent);
+ Assert.Equal(mockAgent, nextAgent);
+ }
+
+ ///
+ /// Verify strategy mismatch.
+ ///
+ [Fact]
+ public async Task VerifyKernelFunctionSelectionStrategyThrowsOnNullResultAsync()
+ {
+ // Arrange
+ MockAgent mockAgent = new();
+ KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(mockAgent.Id));
+
+ KernelFunctionSelectionStrategy strategy =
+ new(plugin.Single(), new())
+ {
+ Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name } },
+ ResultParser = (result) => "larry",
+ };
+
+ // Act and Assert
+ await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent], []));
}
///
/// Verify default state and behavior
@@ -49,21 +73,21 @@ public async Task VerifyKernelFunctionSelectionStrategyDefaultsAsync()
[Fact]
public async Task VerifyKernelFunctionSelectionStrategyInitialAgentAsync()
{
- Mock mockAgent1 = new();
- Mock mockAgent2 = new();
- KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(mockAgent2.Object.Id));
+ MockAgent mockAgent1 = new();
+ MockAgent mockAgent2 = new();
+ KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(mockAgent2.Id));
KernelFunctionSelectionStrategy strategy =
new(plugin.Single(), new())
{
- InitialAgent = mockAgent1.Object,
+ InitialAgent = mockAgent1,
ResultParser = (result) => result.GetValue() ?? string.Empty,
};
- Agent nextAgent = await strategy.NextAsync([mockAgent2.Object], []);
+ Agent nextAgent = await strategy.NextAsync([mockAgent2], []);
Assert.NotNull(nextAgent);
- Assert.Equal(mockAgent1.Object, nextAgent);
+ Assert.Equal(mockAgent1, nextAgent);
}
///
@@ -72,25 +96,25 @@ public async Task VerifyKernelFunctionSelectionStrategyInitialAgentAsync()
[Fact]
public async Task VerifyKernelFunctionSelectionStrategyNullAgentAsync()
{
- Mock mockAgent = new();
+ MockAgent mockAgent = new();
KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin(null));
KernelFunctionSelectionStrategy strategy =
new(plugin.Single(), new())
{
- Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Object.Name } },
+ Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name } },
};
- await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent.Object], []));
+ await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent], []));
strategy =
new(plugin.Single(), new())
{
- Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Object.Name } },
+ Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name } },
UseInitialAgentAsFallback = true
};
- await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent.Object], []));
+ await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent], []));
}
///
@@ -99,25 +123,27 @@ public async Task VerifyKernelFunctionSelectionStrategyNullAgentAsync()
[Fact]
public async Task VerifyKernelFunctionSelectionStrategyBadAgentFallbackWithNoInitialAgentAsync()
{
- Mock mockAgent = new();
+ // Arrange
+ MockAgent mockAgent = new();
KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin("bad"));
KernelFunctionSelectionStrategy strategy =
new(plugin.Single(), new())
{
- Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Object.Name } },
+ Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name } },
};
- await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent.Object], []));
+ await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent], []));
strategy =
new(plugin.Single(), new())
{
- Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Object.Name } },
+ Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name } },
UseInitialAgentAsFallback = true
};
- await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent.Object], []));
+ // Act and Assert
+ await Assert.ThrowsAsync(() => strategy.NextAsync([mockAgent], []));
}
///
@@ -126,21 +152,21 @@ public async Task VerifyKernelFunctionSelectionStrategyBadAgentFallbackWithNoIni
[Fact]
public async Task VerifyKernelFunctionSelectionStrategyBadAgentFallbackAsync()
{
- Mock mockAgent = new();
+ MockAgent mockAgent = new();
KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin("bad"));
KernelFunctionSelectionStrategy strategy =
new(plugin.Single(), new())
{
- Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Object.Name } },
- InitialAgent = mockAgent.Object,
+ Arguments = new(new OpenAIPromptExecutionSettings()) { { "key", mockAgent.Name } },
+ InitialAgent = mockAgent,
UseInitialAgentAsFallback = true
};
- Agent nextAgent = await strategy.NextAsync([mockAgent.Object], []);
+ Agent nextAgent = await strategy.NextAsync([mockAgent], []);
Assert.NotNull(nextAgent);
- Assert.Equal(mockAgent.Object, nextAgent);
+ Assert.Equal(mockAgent, nextAgent);
}
private sealed class TestPlugin(string? agentName)
diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionTerminationStrategyTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionTerminationStrategyTests.cs
index 6f0b446e5e7a..7ee5cf838bc3 100644
--- a/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionTerminationStrategyTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/Chat/KernelFunctionTerminationStrategyTests.cs
@@ -3,10 +3,8 @@
using System.Linq;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.Chat;
using Microsoft.SemanticKernel.Connectors.OpenAI;
-using Moq;
using Xunit;
namespace SemanticKernel.Agents.UnitTests.Core.Chat;
@@ -22,17 +20,26 @@ public class KernelFunctionTerminationStrategyTests
[Fact]
public async Task VerifyKernelFunctionTerminationStrategyDefaultsAsync()
{
+ // Arrange
KernelPlugin plugin = KernelPluginFactory.CreateFromObject(new TestPlugin());
- KernelFunctionTerminationStrategy strategy = new(plugin.Single(), new());
+ KernelFunctionTerminationStrategy strategy =
+ new(plugin.Single(), new())
+ {
+ AgentVariableName = "agent",
+ HistoryVariableName = "history",
+ };
+ // Assert
Assert.Null(strategy.Arguments);
Assert.NotNull(strategy.Kernel);
Assert.NotNull(strategy.ResultParser);
+ Assert.NotEqual("agent", KernelFunctionTerminationStrategy.DefaultAgentVariableName);
+ Assert.NotEqual("history", KernelFunctionTerminationStrategy.DefaultHistoryVariableName);
- Mock mockAgent = new();
-
- bool isTerminating = await strategy.ShouldTerminateAsync(mockAgent.Object, []);
+ // Act
+ MockAgent mockAgent = new();
+ bool isTerminating = await strategy.ShouldTerminateAsync(mockAgent, []);
Assert.True(isTerminating);
}
@@ -52,9 +59,9 @@ public async Task VerifyKernelFunctionTerminationStrategyParsingAsync()
ResultParser = (result) => string.Equals("test", result.GetValue(), StringComparison.OrdinalIgnoreCase)
};
- Mock mockAgent = new();
+ MockAgent mockAgent = new();
- bool isTerminating = await strategy.ShouldTerminateAsync(mockAgent.Object, []);
+ bool isTerminating = await strategy.ShouldTerminateAsync(mockAgent, []);
Assert.True(isTerminating);
}
diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/RegExTerminationStrategyTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/RegExTerminationStrategyTests.cs
index a1b739ae1d1e..196a89ded6e3 100644
--- a/dotnet/src/Agents/UnitTests/Core/Chat/RegExTerminationStrategyTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/Chat/RegExTerminationStrategyTests.cs
@@ -2,10 +2,8 @@
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.Chat;
using Microsoft.SemanticKernel.ChatCompletion;
-using Moq;
using Xunit;
namespace SemanticKernel.Agents.UnitTests.Core.Chat;
@@ -13,7 +11,7 @@ namespace SemanticKernel.Agents.UnitTests.Core.Chat;
///
/// Unit testing of .
///
-public class RegexTerminationStrategyTests
+public partial class RegexTerminationStrategyTests
{
///
/// Verify abililty of strategy to match expression.
@@ -21,10 +19,12 @@ public class RegexTerminationStrategyTests
[Fact]
public async Task VerifyExpressionTerminationStrategyAsync()
{
+ // Arrange
RegexTerminationStrategy strategy = new("test");
- Regex r = new("(?:^|\\W)test(?:$|\\W)");
+ Regex r = MyRegex();
+ // Act and Assert
await VerifyResultAsync(
expectedResult: false,
new(r),
@@ -38,9 +38,17 @@ await VerifyResultAsync(
private static async Task VerifyResultAsync(bool expectedResult, RegexTerminationStrategy strategyRoot, string content)
{
+ // Arrange
ChatMessageContent message = new(AuthorRole.Assistant, content);
- Mock agent = new();
- var result = await strategyRoot.ShouldTerminateAsync(agent.Object, [message]);
+ MockAgent agent = new();
+
+ // Act
+ var result = await strategyRoot.ShouldTerminateAsync(agent, [message]);
+
+ // Assert
Assert.Equal(expectedResult, result);
}
+
+ [GeneratedRegex("(?:^|\\W)test(?:$|\\W)")]
+ private static partial Regex MyRegex();
}
diff --git a/dotnet/src/Agents/UnitTests/Core/Chat/SequentialSelectionStrategyTests.cs b/dotnet/src/Agents/UnitTests/Core/Chat/SequentialSelectionStrategyTests.cs
index bb8fb4665b36..2d06fb6d0078 100644
--- a/dotnet/src/Agents/UnitTests/Core/Chat/SequentialSelectionStrategyTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/Chat/SequentialSelectionStrategyTests.cs
@@ -3,7 +3,6 @@
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.Chat;
-using Moq;
using Xunit;
namespace SemanticKernel.Agents.UnitTests.Core.Chat;
@@ -19,24 +18,27 @@ public class SequentialSelectionStrategyTests
[Fact]
public async Task VerifySequentialSelectionStrategyTurnsAsync()
{
- Mock agent1 = new();
- Mock agent2 = new();
+ // Arrange
+ MockAgent agent1 = new();
+ MockAgent agent2 = new();
- Agent[] agents = [agent1.Object, agent2.Object];
+ Agent[] agents = [agent1, agent2];
SequentialSelectionStrategy strategy = new();
- await VerifyNextAgentAsync(agent1.Object, agents, strategy);
- await VerifyNextAgentAsync(agent2.Object, agents, strategy);
- await VerifyNextAgentAsync(agent1.Object, agents, strategy);
- await VerifyNextAgentAsync(agent2.Object, agents, strategy);
- await VerifyNextAgentAsync(agent1.Object, agents, strategy);
+ // Act and Assert
+ await VerifyNextAgentAsync(agent1, agents, strategy);
+ await VerifyNextAgentAsync(agent2, agents, strategy);
+ await VerifyNextAgentAsync(agent1, agents, strategy);
+ await VerifyNextAgentAsync(agent2, agents, strategy);
+ await VerifyNextAgentAsync(agent1, agents, strategy);
+ // Arrange
strategy.Reset();
- await VerifyNextAgentAsync(agent1.Object, agents, strategy);
+ await VerifyNextAgentAsync(agent1, agents, strategy);
// Verify index does not exceed current bounds.
- agents = [agent1.Object];
- await VerifyNextAgentAsync(agent1.Object, agents, strategy);
+ agents = [agent1];
+ await VerifyNextAgentAsync(agent1, agents, strategy);
}
///
@@ -45,25 +47,18 @@ public async Task VerifySequentialSelectionStrategyTurnsAsync()
[Fact]
public async Task VerifySequentialSelectionStrategyInitialAgentAsync()
{
- Mock agent1 = new();
- Mock agent2 = new();
+ MockAgent agent1 = new();
+ MockAgent agent2 = new();
- Agent[] agents = [agent1.Object, agent2.Object];
+ Agent[] agents = [agent1, agent2];
SequentialSelectionStrategy strategy =
new()
{
- InitialAgent = agent2.Object
+ InitialAgent = agent2
};
- await VerifyNextAgentAsync(agent2.Object, agents, strategy);
- await VerifyNextAgentAsync(agent1.Object, agents, strategy);
- }
-
- private static async Task VerifyNextAgentAsync(Agent expectedAgent, Agent[] agents, SequentialSelectionStrategy strategy)
- {
- Agent? nextAgent = await strategy.NextAsync(agents, []);
- Assert.NotNull(nextAgent);
- Assert.Equal(expectedAgent.Id, nextAgent.Id);
+ await VerifyNextAgentAsync(agent2, agents, strategy);
+ await VerifyNextAgentAsync(agent1, agents, strategy);
}
///
@@ -72,7 +67,19 @@ private static async Task VerifyNextAgentAsync(Agent expectedAgent, Agent[] agen
[Fact]
public async Task VerifySequentialSelectionStrategyEmptyAsync()
{
+ // Arrange
SequentialSelectionStrategy strategy = new();
+
+ // Act and Assert
await Assert.ThrowsAsync(() => strategy.NextAsync([], []));
}
+
+ private static async Task VerifyNextAgentAsync(Agent expectedAgent, Agent[] agents, SequentialSelectionStrategy strategy)
+ {
+ // Act
+ Agent? nextAgent = await strategy.NextAsync(agents, []);
+ // Assert
+ Assert.NotNull(nextAgent);
+ Assert.Equal(expectedAgent.Id, nextAgent.Id);
+ }
}
diff --git a/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs b/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs
index c8a1c0578613..01debd8ded5f 100644
--- a/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/ChatCompletionAgentTests.cs
@@ -5,6 +5,7 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.Agents.History;
using Microsoft.SemanticKernel.ChatCompletion;
using Moq;
using Xunit;
@@ -22,6 +23,7 @@ public class ChatCompletionAgentTests
[Fact]
public void VerifyChatCompletionAgentDefinition()
{
+ // Arrange
ChatCompletionAgent agent =
new()
{
@@ -30,6 +32,7 @@ public void VerifyChatCompletionAgentDefinition()
Name = "test name",
};
+ // Assert
Assert.NotNull(agent.Id);
Assert.Equal("test instructions", agent.Instructions);
Assert.Equal("test description", agent.Description);
@@ -43,7 +46,8 @@ public void VerifyChatCompletionAgentDefinition()
[Fact]
public async Task VerifyChatCompletionAgentInvocationAsync()
{
- var mockService = new Mock();
+ // Arrange
+ Mock mockService = new();
mockService.Setup(
s => s.GetChatMessageContentsAsync(
It.IsAny(),
@@ -51,16 +55,18 @@ public async Task VerifyChatCompletionAgentInvocationAsync()
It.IsAny(),
It.IsAny())).ReturnsAsync([new(AuthorRole.Assistant, "what?")]);
- var agent =
- new ChatCompletionAgent()
+ ChatCompletionAgent agent =
+ new()
{
Instructions = "test instructions",
Kernel = CreateKernel(mockService.Object),
Arguments = [],
};
- var result = await agent.InvokeAsync([]).ToArrayAsync();
+ // Act
+ ChatMessageContent[] result = await agent.InvokeAsync([]).ToArrayAsync();
+ // Assert
Assert.Single(result);
mockService.Verify(
@@ -79,13 +85,14 @@ public async Task VerifyChatCompletionAgentInvocationAsync()
[Fact]
public async Task VerifyChatCompletionAgentStreamingAsync()
{
+ // Arrange
StreamingChatMessageContent[] returnContent =
[
new(AuthorRole.Assistant, "wh"),
new(AuthorRole.Assistant, "at?"),
];
- var mockService = new Mock();
+ Mock mockService = new();
mockService.Setup(
s => s.GetStreamingChatMessageContentsAsync(
It.IsAny(),
@@ -93,16 +100,18 @@ public async Task VerifyChatCompletionAgentStreamingAsync()
It.IsAny(),
It.IsAny())).Returns(returnContent.ToAsyncEnumerable());
- var agent =
- new ChatCompletionAgent()
+ ChatCompletionAgent agent =
+ new()
{
Instructions = "test instructions",
Kernel = CreateKernel(mockService.Object),
Arguments = [],
};
- var result = await agent.InvokeStreamingAsync([]).ToArrayAsync();
+ // Act
+ StreamingChatMessageContent[] result = await agent.InvokeStreamingAsync([]).ToArrayAsync();
+ // Assert
Assert.Equal(2, result.Length);
mockService.Verify(
@@ -115,6 +124,52 @@ public async Task VerifyChatCompletionAgentStreamingAsync()
Times.Once);
}
+ ///
+ /// Verify the invocation and response of .
+ ///
+ [Fact]
+ public void VerifyChatCompletionServiceSelection()
+ {
+ // Arrange
+ Mock mockService = new();
+ Kernel kernel = CreateKernel(mockService.Object);
+
+ // Act
+ (IChatCompletionService service, PromptExecutionSettings? settings) = ChatCompletionAgent.GetChatCompletionService(kernel, null);
+ // Assert
+ Assert.Equal(mockService.Object, service);
+ Assert.Null(settings);
+
+ // Act
+ (service, settings) = ChatCompletionAgent.GetChatCompletionService(kernel, []);
+ // Assert
+ Assert.Equal(mockService.Object, service);
+ Assert.Null(settings);
+
+ // Act and Assert
+ Assert.Throws(() => ChatCompletionAgent.GetChatCompletionService(kernel, new KernelArguments(new PromptExecutionSettings() { ServiceId = "anything" })));
+ }
+
+ ///
+ /// Verify the invocation and response of .
+ ///
+ [Fact]
+ public void VerifyChatCompletionChannelKeys()
+ {
+ // Arrange
+ ChatCompletionAgent agent1 = new();
+ ChatCompletionAgent agent2 = new();
+ ChatCompletionAgent agent3 = new() { HistoryReducer = new ChatHistoryTruncationReducer(50) };
+ ChatCompletionAgent agent4 = new() { HistoryReducer = new ChatHistoryTruncationReducer(50) };
+ ChatCompletionAgent agent5 = new() { HistoryReducer = new ChatHistoryTruncationReducer(100) };
+
+ // Act and Assert
+ Assert.Equal(agent1.GetChannelKeys(), agent2.GetChannelKeys());
+ Assert.Equal(agent3.GetChannelKeys(), agent4.GetChannelKeys());
+ Assert.NotEqual(agent1.GetChannelKeys(), agent3.GetChannelKeys());
+ Assert.NotEqual(agent3.GetChannelKeys(), agent5.GetChannelKeys());
+ }
+
private static Kernel CreateKernel(IChatCompletionService chatCompletionService)
{
var builder = Kernel.CreateBuilder();
diff --git a/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs b/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs
index 6732da6628e8..92aca7fadb67 100644
--- a/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/ChatHistoryChannelTests.cs
@@ -1,11 +1,9 @@
// Copyright (c) Microsoft. All rights reserved.
-using System;
-using System.Collections.Generic;
using System.Linq;
-using System.Threading;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
+using Moq;
using Xunit;
namespace SemanticKernel.Agents.UnitTests.Core;
@@ -22,21 +20,11 @@ public class ChatHistoryChannelTests
[Fact]
public async Task VerifyAgentWithoutIChatHistoryHandlerAsync()
{
- TestAgent agent = new(); // Not a IChatHistoryHandler
+ // Arrange
+ Mock agent = new(); // Not an IChatHistoryHandler
ChatHistoryChannel channel = new(); // Requires IChatHistoryHandler
- await Assert.ThrowsAsync(() => channel.InvokeAsync(agent).ToArrayAsync().AsTask());
- }
-
- private sealed class TestAgent : KernelAgent
- {
- protected internal override Task CreateChannelAsync(CancellationToken cancellationToken)
- {
- throw new NotImplementedException();
- }
- protected internal override IEnumerable GetChannelKeys()
- {
- throw new NotImplementedException();
- }
+ // Act & Assert
+ await Assert.ThrowsAsync(() => channel.InvokeAsync(agent.Object).ToArrayAsync().AsTask());
}
}
diff --git a/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs b/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs
index a75533474147..d9042305d9fa 100644
--- a/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryReducerExtensionsTests.cs
@@ -30,8 +30,10 @@ public class ChatHistoryReducerExtensionsTests
[InlineData(100, 0, int.MaxValue, 100)]
public void VerifyChatHistoryExtraction(int messageCount, int startIndex, int? endIndex = null, int? expectedCount = null)
{
+ // Arrange
ChatHistory history = [.. MockHistoryGenerator.CreateSimpleHistory(messageCount)];
+ // Act
ChatMessageContent[] extractedHistory = history.Extract(startIndex, endIndex).ToArray();
int finalIndex = endIndex ?? messageCount - 1;
@@ -39,6 +41,7 @@ public void VerifyChatHistoryExtraction(int messageCount, int startIndex, int? e
expectedCount ??= finalIndex - startIndex + 1;
+ // Assert
Assert.Equal(expectedCount, extractedHistory.Length);
if (extractedHistory.Length > 0)
@@ -58,16 +61,19 @@ public void VerifyChatHistoryExtraction(int messageCount, int startIndex, int? e
[InlineData(100, 0)]
public void VerifyGetFinalSummaryIndex(int summaryCount, int regularCount)
{
+ // Arrange
ChatHistory summaries = [.. MockHistoryGenerator.CreateSimpleHistory(summaryCount)];
foreach (ChatMessageContent summary in summaries)
{
summary.Metadata = new Dictionary() { { "summary", true } };
}
+ // Act
ChatHistory history = [.. summaries, .. MockHistoryGenerator.CreateSimpleHistory(regularCount)];
int finalSummaryIndex = history.LocateSummarizationBoundary("summary");
+ // Assert
Assert.Equal(summaryCount, finalSummaryIndex);
}
@@ -77,17 +83,22 @@ public void VerifyGetFinalSummaryIndex(int summaryCount, int regularCount)
[Fact]
public async Task VerifyChatHistoryNotReducedAsync()
{
+ // Arrange
ChatHistory history = [];
+ Mock mockReducer = new();
+ mockReducer.Setup(r => r.ReduceAsync(It.IsAny>(), default)).ReturnsAsync((IEnumerable?)null);
+ // Act
bool isReduced = await history.ReduceAsync(null, default);
+ // Assert
Assert.False(isReduced);
Assert.Empty(history);
- Mock mockReducer = new();
- mockReducer.Setup(r => r.ReduceAsync(It.IsAny>(), default)).ReturnsAsync((IEnumerable?)null);
+ // Act
isReduced = await history.ReduceAsync(mockReducer.Object, default);
+ // Assert
Assert.False(isReduced);
Assert.Empty(history);
}
@@ -98,13 +109,16 @@ public async Task VerifyChatHistoryNotReducedAsync()
[Fact]
public async Task VerifyChatHistoryReducedAsync()
{
+ // Arrange
Mock mockReducer = new();
mockReducer.Setup(r => r.ReduceAsync(It.IsAny>(), default)).ReturnsAsync((IEnumerable?)[]);
ChatHistory history = [.. MockHistoryGenerator.CreateSimpleHistory(10)];
+ // Act
bool isReduced = await history.ReduceAsync(mockReducer.Object, default);
+ // Assert
Assert.True(isReduced);
Assert.Empty(history);
}
@@ -124,11 +138,13 @@ public async Task VerifyChatHistoryReducedAsync()
[InlineData(900, 500, int.MaxValue)]
public void VerifyLocateSafeReductionIndexNone(int messageCount, int targetCount, int? thresholdCount = null)
{
- // Shape of history doesn't matter since reduction is not expected
+ // Arrange: Shape of history doesn't matter since reduction is not expected
ChatHistory sourceHistory = [.. MockHistoryGenerator.CreateHistoryWithUserInput(messageCount)];
+ // Act
int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount);
+ // Assert
Assert.Equal(0, reductionIndex);
}
@@ -146,11 +162,13 @@ public void VerifyLocateSafeReductionIndexNone(int messageCount, int targetCount
[InlineData(1000, 500, 499)]
public void VerifyLocateSafeReductionIndexFound(int messageCount, int targetCount, int? thresholdCount = null)
{
- // Generate history with only assistant messages
+ // Arrange: Generate history with only assistant messages
ChatHistory sourceHistory = [.. MockHistoryGenerator.CreateSimpleHistory(messageCount)];
+ // Act
int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount);
+ // Assert
Assert.True(reductionIndex > 0);
Assert.Equal(targetCount, messageCount - reductionIndex);
}
@@ -170,17 +188,20 @@ public void VerifyLocateSafeReductionIndexFound(int messageCount, int targetCoun
[InlineData(1000, 500, 499)]
public void VerifyLocateSafeReductionIndexFoundWithUser(int messageCount, int targetCount, int? thresholdCount = null)
{
- // Generate history with alternating user and assistant messages
+ // Arrange: Generate history with alternating user and assistant messages
ChatHistory sourceHistory = [.. MockHistoryGenerator.CreateHistoryWithUserInput(messageCount)];
+ // Act
int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount);
+ // Assert
Assert.True(reductionIndex > 0);
- // The reduction length should align with a user message, if threshold is specified
+ // Act: The reduction length should align with a user message, if threshold is specified
bool hasThreshold = thresholdCount > 0;
int expectedCount = targetCount + (hasThreshold && sourceHistory[^targetCount].Role != AuthorRole.User ? 1 : 0);
+ // Assert
Assert.Equal(expectedCount, messageCount - reductionIndex);
}
@@ -201,14 +222,16 @@ public void VerifyLocateSafeReductionIndexFoundWithUser(int messageCount, int ta
[InlineData(9)]
public void VerifyLocateSafeReductionIndexWithFunctionContent(int targetCount, int? thresholdCount = null)
{
- // Generate a history with function call on index 5 and 9 and
+ // Arrange: Generate a history with function call on index 5 and 9 and
// function result on index 6 and 10 (total length: 14)
ChatHistory sourceHistory = [.. MockHistoryGenerator.CreateHistoryWithFunctionContent()];
ChatHistoryTruncationReducer reducer = new(targetCount, thresholdCount);
+ // Act
int reductionIndex = sourceHistory.LocateSafeReductionIndex(targetCount, thresholdCount);
+ // Assert
Assert.True(reductionIndex > 0);
// The reduction length avoid splitting function call and result, regardless of threshold
@@ -216,7 +239,7 @@ public void VerifyLocateSafeReductionIndexWithFunctionContent(int targetCount, i
if (sourceHistory[sourceHistory.Count - targetCount].Items.Any(i => i is FunctionCallContent))
{
- expectedCount += 1;
+ expectedCount++;
}
else if (sourceHistory[sourceHistory.Count - targetCount].Items.Any(i => i is FunctionResultContent))
{
diff --git a/dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs b/dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs
index f464b6a8214a..53e93d0026c3 100644
--- a/dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/History/ChatHistorySummarizationReducerTests.cs
@@ -23,10 +23,12 @@ public class ChatHistorySummarizationReducerTests
[InlineData(-1)]
[InlineData(-1, int.MaxValue)]
[InlineData(int.MaxValue, -1)]
- public void VerifyChatHistoryConstructorArgumentValidation(int targetCount, int? thresholdCount = null)
+ public void VerifyConstructorArgumentValidation(int targetCount, int? thresholdCount = null)
{
+ // Arrange
Mock mockCompletionService = this.CreateMockCompletionService();
+ // Act & Assert
Assert.Throws(() => new ChatHistorySummarizationReducer(mockCompletionService.Object, targetCount, thresholdCount));
}
@@ -34,15 +36,17 @@ public void VerifyChatHistoryConstructorArgumentValidation(int targetCount, int?
/// Verify object state after initialization.
///
[Fact]
- public void VerifyChatHistoryInitializationState()
+ public void VerifyInitializationState()
{
+ // Arrange
Mock mockCompletionService = this.CreateMockCompletionService();
-
ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10);
+ // Assert
Assert.Equal(ChatHistorySummarizationReducer.DefaultSummarizationPrompt, reducer.SummarizationInstructions);
Assert.True(reducer.FailOnError);
+ // Act
reducer =
new(mockCompletionService.Object, 10)
{
@@ -50,25 +54,62 @@ public void VerifyChatHistoryInitializationState()
SummarizationInstructions = "instructions",
};
+ // Assert
Assert.NotEqual(ChatHistorySummarizationReducer.DefaultSummarizationPrompt, reducer.SummarizationInstructions);
Assert.False(reducer.FailOnError);
}
+ ///
+ /// Validate equality override.
+ ///
+ [Fact]
+ public void VerifyEquality()
+ {
+ // Arrange
+ Mock mockCompletionService = this.CreateMockCompletionService();
+
+ ChatHistorySummarizationReducer reducer1 = new(mockCompletionService.Object, 3, 3);
+ ChatHistorySummarizationReducer reducer2 = new(mockCompletionService.Object, 3, 3);
+ ChatHistorySummarizationReducer reducer3 = new(mockCompletionService.Object, 3, 3) { UseSingleSummary = false };
+ ChatHistorySummarizationReducer reducer4 = new(mockCompletionService.Object, 3, 3) { SummarizationInstructions = "override" };
+ ChatHistorySummarizationReducer reducer5 = new(mockCompletionService.Object, 4, 3);
+ ChatHistorySummarizationReducer reducer6 = new(mockCompletionService.Object, 3, 5);
+ ChatHistorySummarizationReducer reducer7 = new(mockCompletionService.Object, 3);
+ ChatHistorySummarizationReducer reducer8 = new(mockCompletionService.Object, 3);
+
+ // Assert
+ Assert.True(reducer1.Equals(reducer1));
+ Assert.True(reducer1.Equals(reducer2));
+ Assert.True(reducer7.Equals(reducer8));
+ Assert.True(reducer3.Equals(reducer3));
+ Assert.True(reducer4.Equals(reducer4));
+ Assert.False(reducer1.Equals(reducer3));
+ Assert.False(reducer1.Equals(reducer4));
+ Assert.False(reducer1.Equals(reducer5));
+ Assert.False(reducer1.Equals(reducer6));
+ Assert.False(reducer1.Equals(reducer7));
+ Assert.False(reducer1.Equals(reducer8));
+ Assert.False(reducer1.Equals(null));
+ }
+
///
/// Validate hash-code expresses reducer equivalency.
///
[Fact]
- public void VerifyChatHistoryHasCode()
+ public void VerifyHashCode()
{
+ // Arrange
HashSet reducers = [];
Mock mockCompletionService = this.CreateMockCompletionService();
+ // Act
int hashCode1 = GenerateHashCode(3, 4);
int hashCode2 = GenerateHashCode(33, 44);
int hashCode3 = GenerateHashCode(3000, 4000);
int hashCode4 = GenerateHashCode(3000, 4000);
+ // Assert
Assert.NotEqual(hashCode1, hashCode2);
Assert.NotEqual(hashCode2, hashCode3);
Assert.Equal(hashCode3, hashCode4);
@@ -90,12 +131,15 @@ int GenerateHashCode(int targetCount, int thresholdCount)
[Fact]
public async Task VerifyChatHistoryReductionSilentFailureAsync()
{
+ // Arrange
Mock mockCompletionService = this.CreateMockCompletionService(throwException: true);
IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray();
-
ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10) { FailOnError = false };
+
+ // Act
IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory);
+ // Assert
Assert.Null(reducedHistory);
}
@@ -105,10 +149,12 @@ public async Task VerifyChatHistoryReductionSilentFailureAsync()
[Fact]
public async Task VerifyChatHistoryReductionThrowsOnFailureAsync()
{
+ // Arrange
Mock mockCompletionService = this.CreateMockCompletionService(throwException: true);
IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray();
-
ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10);
+
+ // Act and Assert
await Assert.ThrowsAsync(() => reducer.ReduceAsync(sourceHistory));
}
@@ -118,12 +164,15 @@ public async Task VerifyChatHistoryReductionThrowsOnFailureAsync()
[Fact]
public async Task VerifyChatHistoryNotReducedAsync()
{
+ // Arrange
Mock mockCompletionService = this.CreateMockCompletionService();
IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray();
-
ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 20);
+
+ // Act
IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory);
+ // Assert
Assert.Null(reducedHistory);
}
@@ -133,12 +182,15 @@ public async Task VerifyChatHistoryNotReducedAsync()
[Fact]
public async Task VerifyChatHistoryReducedAsync()
{
+ // Arrange
Mock mockCompletionService = this.CreateMockCompletionService();
IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray();
-
ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10);
+
+ // Act
IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory);
+ // Assert
ChatMessageContent[] messages = VerifyReducedHistory(reducedHistory, 11);
VerifySummarization(messages[0]);
}
@@ -149,19 +201,24 @@ public async Task VerifyChatHistoryReducedAsync()
[Fact]
public async Task VerifyChatHistoryRereducedAsync()
{
+ // Arrange
Mock mockCompletionService = this.CreateMockCompletionService();
IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray();
-
ChatHistorySummarizationReducer reducer = new(mockCompletionService.Object, 10);
+
+ // Act
IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory);
reducedHistory = await reducer.ReduceAsync([.. reducedHistory!, .. sourceHistory]);
+ // Assert
ChatMessageContent[] messages = VerifyReducedHistory(reducedHistory, 11);
VerifySummarization(messages[0]);
+ // Act
reducer = new(mockCompletionService.Object, 10) { UseSingleSummary = false };
reducedHistory = await reducer.ReduceAsync([.. reducedHistory!, .. sourceHistory]);
+ // Assert
messages = VerifyReducedHistory(reducedHistory, 12);
VerifySummarization(messages[0]);
VerifySummarization(messages[1]);
diff --git a/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs b/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs
index eebcf8fc6136..9d8b2e721fdf 100644
--- a/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs
+++ b/dotnet/src/Agents/UnitTests/Core/History/ChatHistoryTruncationReducerTests.cs
@@ -21,24 +21,54 @@ public class ChatHistoryTruncationReducerTests
[InlineData(-1)]
[InlineData(-1, int.MaxValue)]
[InlineData(int.MaxValue, -1)]
- public void VerifyChatHistoryConstructorArgumentValidation(int targetCount, int? thresholdCount = null)
+ public void VerifyConstructorArgumentValidation(int targetCount, int? thresholdCount = null)
{
+ // Act and Assert
Assert.Throws(() => new ChatHistoryTruncationReducer(targetCount, thresholdCount));
}
+ ///
+ /// Validate equality override.
+ ///
+ [Fact]
+ public void VerifyEquality()
+ {
+ // Arrange
+ ChatHistoryTruncationReducer reducer1 = new(3, 3);
+ ChatHistoryTruncationReducer reducer2 = new(3, 3);
+ ChatHistoryTruncationReducer reducer3 = new(4, 3);
+ ChatHistoryTruncationReducer reducer4 = new(3, 5);
+ ChatHistoryTruncationReducer reducer5 = new(3);
+ ChatHistoryTruncationReducer reducer6 = new(3);
+
+ // Assert
+ Assert.True(reducer1.Equals(reducer1));
+ Assert.True(reducer1.Equals(reducer2));
+ Assert.True(reducer5.Equals(reducer6));
+ Assert.True(reducer3.Equals(reducer3));
+ Assert.False(reducer1.Equals(reducer3));
+ Assert.False(reducer1.Equals(reducer4));
+ Assert.False(reducer1.Equals(reducer5));
+ Assert.False(reducer1.Equals(reducer6));
+ Assert.False(reducer1.Equals(null));
+ }
+
///
/// Validate hash-code expresses reducer equivalency.
///
[Fact]
- public void VerifyChatHistoryHasCode()
+ public void VerifyHashCode()
{
+ // Arrange
HashSet reducers = [];
+ // Act
int hashCode1 = GenerateHashCode(3, 4);
int hashCode2 = GenerateHashCode(33, 44);
int hashCode3 = GenerateHashCode(3000, 4000);
int hashCode4 = GenerateHashCode(3000, 4000);
+ // Assert
Assert.NotEqual(hashCode1, hashCode2);
Assert.NotEqual(hashCode2, hashCode3);
Assert.Equal(hashCode3, hashCode4);
@@ -60,11 +90,14 @@ int GenerateHashCode(int targetCount, int thresholdCount)
[Fact]
public async Task VerifyChatHistoryNotReducedAsync()
{
+ // Arrange
IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(10).ToArray();
-
ChatHistoryTruncationReducer reducer = new(20);
+
+ // Act
IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory);
+ // Assert
Assert.Null(reducedHistory);
}
@@ -74,11 +107,14 @@ public async Task VerifyChatHistoryNotReducedAsync()
[Fact]
public async Task VerifyChatHistoryReducedAsync()
{
+ // Arrange
IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray();
-
ChatHistoryTruncationReducer reducer = new(10);
+
+ // Act
IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory);
+ // Assert
VerifyReducedHistory(reducedHistory, 10);
}
@@ -88,12 +124,15 @@ public async Task VerifyChatHistoryReducedAsync()
[Fact]
public async Task VerifyChatHistoryRereducedAsync()
{
+ // Arrange
IReadOnlyList sourceHistory = MockHistoryGenerator.CreateSimpleHistory(20).ToArray();
-
ChatHistoryTruncationReducer reducer = new(10);
+
+ // Act
IEnumerable? reducedHistory = await reducer.ReduceAsync(sourceHistory);
reducedHistory = await reducer.ReduceAsync([.. reducedHistory!, .. sourceHistory]);
+ // Assert
VerifyReducedHistory(reducedHistory, 10);
}
diff --git a/dotnet/src/Agents/UnitTests/Extensions/ChatHistoryExtensionsTests.cs b/dotnet/src/Agents/UnitTests/Extensions/ChatHistoryExtensionsTests.cs
index 14a938a7b169..d7f370e3734c 100644
--- a/dotnet/src/Agents/UnitTests/Extensions/ChatHistoryExtensionsTests.cs
+++ b/dotnet/src/Agents/UnitTests/Extensions/ChatHistoryExtensionsTests.cs
@@ -19,10 +19,12 @@ public class ChatHistoryExtensionsTests
[Fact]
public void VerifyChatHistoryOrdering()
{
+ // Arrange
ChatHistory history = [];
history.AddUserMessage("Hi");
history.AddAssistantMessage("Hi");
+ // Act and Assert
VerifyRole(AuthorRole.User, history.First());
VerifyRole(AuthorRole.Assistant, history.Last());
@@ -36,10 +38,12 @@ public void VerifyChatHistoryOrdering()
[Fact]
public async Task VerifyChatHistoryOrderingAsync()
{
+ // Arrange
ChatHistory history = [];
history.AddUserMessage("Hi");
history.AddAssistantMessage("Hi");
+ // Act and Assert
VerifyRole(AuthorRole.User, history.First());
VerifyRole(AuthorRole.Assistant, history.Last());
diff --git a/dotnet/src/Agents/UnitTests/Internal/BroadcastQueueTests.cs b/dotnet/src/Agents/UnitTests/Internal/BroadcastQueueTests.cs
index 987c67fce804..720197a90c55 100644
--- a/dotnet/src/Agents/UnitTests/Internal/BroadcastQueueTests.cs
+++ b/dotnet/src/Agents/UnitTests/Internal/BroadcastQueueTests.cs
@@ -22,8 +22,10 @@ public class BroadcastQueueTests
[Fact]
public void VerifyBroadcastQueueDefaultConfiguration()
{
+ // Arrange
BroadcastQueue queue = new();
+ // Assert
Assert.True(queue.BlockDuration.TotalSeconds > 0);
}
@@ -33,7 +35,7 @@ public void VerifyBroadcastQueueDefaultConfiguration()
[Fact]
public async Task VerifyBroadcastQueueReceiveAsync()
{
- // Create queue and channel.
+ // Arrange: Create queue and channel.
BroadcastQueue queue =
new()
{
@@ -42,23 +44,31 @@ public async Task VerifyBroadcastQueueReceiveAsync()
TestChannel channel = new();
ChannelReference reference = new(channel, "test");
- // Verify initial state
+ // Act: Verify initial state
await VerifyReceivingStateAsync(receiveCount: 0, queue, channel, "test");
+
+ // Assert
Assert.Empty(channel.ReceivedMessages);
- // Verify empty invocation with no channels.
+ // Act: Verify empty invocation with no channels.
queue.Enqueue([], []);
await VerifyReceivingStateAsync(receiveCount: 0, queue, channel, "test");
+
+ // Assert
Assert.Empty(channel.ReceivedMessages);
- // Verify empty invocation of channel.
+ // Act: Verify empty invocation of channel.
queue.Enqueue([reference], []);
await VerifyReceivingStateAsync(receiveCount: 1, queue, channel, "test");
+
+ // Assert
Assert.Empty(channel.ReceivedMessages);
- // Verify expected invocation of channel.
+ // Act: Verify expected invocation of channel.
queue.Enqueue([reference], [new ChatMessageContent(AuthorRole.User, "hi")]);
await VerifyReceivingStateAsync(receiveCount: 2, queue, channel, "test");
+
+ // Assert
Assert.NotEmpty(channel.ReceivedMessages);
}
@@ -68,7 +78,7 @@ public async Task VerifyBroadcastQueueReceiveAsync()
[Fact]
public async Task VerifyBroadcastQueueFailureAsync()
{
- // Create queue and channel.
+ // Arrange: Create queue and channel.
BroadcastQueue queue =
new()
{
@@ -77,9 +87,10 @@ public async Task VerifyBroadcastQueueFailureAsync()
BadChannel channel = new();
ChannelReference reference = new(channel, "test");
- // Verify expected invocation of channel.
+ // Act: Verify expected invocation of channel.
queue.Enqueue([reference], [new ChatMessageContent(AuthorRole.User, "hi")]);
+ // Assert
await Assert.ThrowsAsync(() => queue.EnsureSynchronizedAsync(reference));
await Assert.ThrowsAsync(() => queue.EnsureSynchronizedAsync(reference));
await Assert.ThrowsAsync(() => queue.EnsureSynchronizedAsync(reference));
@@ -91,7 +102,7 @@ public async Task VerifyBroadcastQueueFailureAsync()
[Fact]
public async Task VerifyBroadcastQueueConcurrencyAsync()
{
- // Create queue and channel.
+ // Arrange: Create queue and channel.
BroadcastQueue queue =
new()
{
@@ -100,7 +111,7 @@ public async Task VerifyBroadcastQueueConcurrencyAsync()
TestChannel channel = new();
ChannelReference reference = new(channel, "test");
- // Enqueue multiple channels
+ // Act: Enqueue multiple channels
for (int count = 0; count < 10; ++count)
{
queue.Enqueue([new(channel, $"test{count}")], [new ChatMessageContent(AuthorRole.User, "hi")]);
@@ -112,7 +123,7 @@ public async Task VerifyBroadcastQueueConcurrencyAsync()
await queue.EnsureSynchronizedAsync(new ChannelReference(channel, $"test{count}"));
}
- // Verify result
+ // Assert
Assert.NotEmpty(channel.ReceivedMessages);
Assert.Equal(10, channel.ReceivedMessages.Count);
}
diff --git a/dotnet/src/Agents/UnitTests/Internal/KeyEncoderTests.cs b/dotnet/src/Agents/UnitTests/Internal/KeyEncoderTests.cs
index 0a9715f25115..13cc3203d58c 100644
--- a/dotnet/src/Agents/UnitTests/Internal/KeyEncoderTests.cs
+++ b/dotnet/src/Agents/UnitTests/Internal/KeyEncoderTests.cs
@@ -17,21 +17,24 @@ public class KeyEncoderTests
[Fact]
public void VerifyKeyEncoderUniqueness()
{
+ // Act
this.VerifyHashEquivalancy([]);
this.VerifyHashEquivalancy(nameof(KeyEncoderTests));
this.VerifyHashEquivalancy(nameof(KeyEncoderTests), "http://localhost", "zoo");
- // Verify "well-known" value
+ // Assert: Verify "well-known" value
string localHash = KeyEncoder.GenerateHash([typeof(ChatHistoryChannel).FullName!]);
Assert.Equal("Vdx37EnWT9BS+kkCkEgFCg9uHvHNw1+hXMA4sgNMKs4=", localHash);
}
private void VerifyHashEquivalancy(params string[] keys)
{
+ // Act
string hash1 = KeyEncoder.GenerateHash(keys);
string hash2 = KeyEncoder.GenerateHash(keys);
string hash3 = KeyEncoder.GenerateHash(keys.Concat(["another"]));
+ // Assert
Assert.Equal(hash1, hash2);
Assert.NotEqual(hash1, hash3);
}
diff --git a/dotnet/src/Agents/UnitTests/MockAgent.cs b/dotnet/src/Agents/UnitTests/MockAgent.cs
index b8b7f295e02b..6e20c0434b93 100644
--- a/dotnet/src/Agents/UnitTests/MockAgent.cs
+++ b/dotnet/src/Agents/UnitTests/MockAgent.cs
@@ -11,7 +11,7 @@ namespace SemanticKernel.Agents.UnitTests;
///
/// Mock definition of with a contract.
///
-internal sealed class MockAgent : ChatHistoryKernelAgent
+internal class MockAgent : ChatHistoryKernelAgent
{
public int InvokeCount { get; private set; }
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs b/dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs
new file mode 100644
index 000000000000..cd51c736ac18
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/AssertCollection.cs
@@ -0,0 +1,46 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.Collections.Generic;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI;
+
+internal static class AssertCollection
+{
+ public static void Equal(IReadOnlyList? source, IReadOnlyList? target, Func? adapter = null)
+ {
+ if (source == null)
+ {
+ Assert.Null(target);
+ return;
+ }
+
+ Assert.NotNull(target);
+ Assert.Equal(source.Count, target.Count);
+
+ adapter ??= (x) => x;
+
+ for (int i = 0; i < source.Count; i++)
+ {
+ Assert.Equal(adapter(source[i]), adapter(target[i]));
+ }
+ }
+
+ public static void Equal(IReadOnlyDictionary? source, IReadOnlyDictionary? target)
+ {
+ if (source == null)
+ {
+ Assert.Null(target);
+ return;
+ }
+
+ Assert.NotNull(target);
+ Assert.Equal(source.Count, target.Count);
+
+ foreach ((TKey key, TValue value) in source)
+ {
+ Assert.True(target.TryGetValue(key, out TValue? targetValue));
+ Assert.Equal(value, targetValue);
+ }
+ }
+}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs
index b1e4d397eded..6288c6a5aed8 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Azure/AddHeaderRequestPolicyTests.cs
@@ -2,7 +2,7 @@
using System.Linq;
using Azure.Core;
using Azure.Core.Pipeline;
-using Microsoft.SemanticKernel.Agents.OpenAI.Azure;
+using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
using Xunit;
namespace SemanticKernel.Agents.UnitTests.OpenAI.Azure;
@@ -18,14 +18,17 @@ public class AddHeaderRequestPolicyTests
[Fact]
public void VerifyAddHeaderRequestPolicyExecution()
{
+ // Arrange
using HttpClientTransport clientTransport = new();
HttpPipeline pipeline = new(clientTransport);
HttpMessage message = pipeline.CreateMessage();
-
AddHeaderRequestPolicy policy = new(headerName: "testname", headerValue: "testvalue");
+
+ // Act
policy.OnSendingRequest(message);
+ // Assert
Assert.Single(message.Request.Headers);
HttpHeader header = message.Request.Headers.Single();
Assert.Equal("testname", header.Name);
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AuthorRoleExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AuthorRoleExtensionsTests.cs
index 0b0a0707e49a..97dbf32903d6 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AuthorRoleExtensionsTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/AuthorRoleExtensionsTests.cs
@@ -1,7 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
-using Azure.AI.OpenAI.Assistants;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
using Xunit;
using KernelExtensions = Microsoft.SemanticKernel.Agents.OpenAI;
@@ -29,7 +29,10 @@ public void VerifyToMessageRole()
private void VerifyRoleConversion(AuthorRole inputRole, MessageRole expectedRole)
{
+ // Arrange
MessageRole convertedRole = inputRole.ToMessageRole();
+
+ // Assert
Assert.Equal(expectedRole, convertedRole);
}
}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs
index 3f982f3a7b47..70c27ccb2152 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelExtensionsTests.cs
@@ -17,11 +17,15 @@ public class KernelExtensionsTests
[Fact]
public void VerifyGetKernelFunctionLookup()
{
+ // Arrange
Kernel kernel = new();
KernelPlugin plugin = KernelPluginFactory.CreateFromType();
kernel.Plugins.Add(plugin);
+ // Act
KernelFunction function = kernel.GetKernelFunction($"{nameof(TestPlugin)}-{nameof(TestPlugin.TestFunction)}", '-');
+
+ // Assert
Assert.NotNull(function);
Assert.Equal(nameof(TestPlugin.TestFunction), function.Name);
}
@@ -32,10 +36,12 @@ public void VerifyGetKernelFunctionLookup()
[Fact]
public void VerifyGetKernelFunctionInvalid()
{
+ // Arrange
Kernel kernel = new();
KernelPlugin plugin = KernelPluginFactory.CreateFromType();
kernel.Plugins.Add(plugin);
+ // Act and Assert
Assert.Throws(() => kernel.GetKernelFunction("a", '-'));
Assert.Throws(() => kernel.GetKernelFunction("a-b", ':'));
Assert.Throws(() => kernel.GetKernelFunction("a-b-c", '-'));
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelFunctionExtensionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelFunctionExtensionsTests.cs
index eeb8a4d3b9d1..acf195840366 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelFunctionExtensionsTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Extensions/KernelFunctionExtensionsTests.cs
@@ -1,9 +1,9 @@
// Copyright (c) Microsoft. All rights reserved.
using System;
using System.ComponentModel;
-using Azure.AI.OpenAI.Assistants;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents.OpenAI;
+using OpenAI.Assistants;
using Xunit;
namespace SemanticKernel.Agents.UnitTests.OpenAI.Extensions;
@@ -19,18 +19,28 @@ public class KernelFunctionExtensionsTests
[Fact]
public void VerifyKernelFunctionToFunctionTool()
{
+ // Arrange
KernelPlugin plugin = KernelPluginFactory.CreateFromType();
+
+ // Assert
Assert.Equal(2, plugin.FunctionCount);
+ // Arrange
KernelFunction f1 = plugin[nameof(TestPlugin.TestFunction1)];
KernelFunction f2 = plugin[nameof(TestPlugin.TestFunction2)];
- FunctionToolDefinition definition1 = f1.ToToolDefinition("testplugin", "-");
- Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction1)}", definition1.Name, StringComparison.Ordinal);
+ // Act
+ FunctionToolDefinition definition1 = f1.ToToolDefinition("testplugin");
+
+ // Assert
+ Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction1)}", definition1.FunctionName, StringComparison.Ordinal);
Assert.Equal("test description", definition1.Description);
- FunctionToolDefinition definition2 = f2.ToToolDefinition("testplugin", "-");
- Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction2)}", definition2.Name, StringComparison.Ordinal);
+ // Act
+ FunctionToolDefinition definition2 = f2.ToToolDefinition("testplugin");
+
+ // Assert
+ Assert.StartsWith($"testplugin-{nameof(TestPlugin.TestFunction2)}", definition2.FunctionName, StringComparison.Ordinal);
Assert.Equal("test description", definition2.Description);
}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantMessageFactoryTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantMessageFactoryTests.cs
new file mode 100644
index 000000000000..50dec2cb95ae
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantMessageFactoryTests.cs
@@ -0,0 +1,210 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI.Internal;
+
+///
+/// Unit testing of .
+///
+public class AssistantMessageFactoryTests
+{
+ ///
+ /// Verify options creation.
+ ///
+ [Fact]
+ public void VerifyAssistantMessageAdapterCreateOptionsDefault()
+ {
+ // Arrange (Setup message with null metadata)
+ ChatMessageContent message = new(AuthorRole.User, "test");
+
+ // Act: Create options
+ MessageCreationOptions options = AssistantMessageFactory.CreateOptions(message);
+
+ // Assert
+ Assert.NotNull(options);
+ Assert.Empty(options.Metadata);
+ }
+
+ ///
+ /// Verify options creation.
+ ///
+ [Fact]
+ public void VerifyAssistantMessageAdapterCreateOptionsWithMetadataEmpty()
+ {
+ // Arrange: Setup message with empty metadata
+ ChatMessageContent message =
+ new(AuthorRole.User, "test")
+ {
+ Metadata = new Dictionary()
+ };
+
+ // Act: Create options
+ MessageCreationOptions options = AssistantMessageFactory.CreateOptions(message);
+
+ // Assert
+ Assert.NotNull(options);
+ Assert.Empty(options.Metadata);
+ }
+
+ ///
+ /// Verify options creation.
+ ///
+ [Fact]
+ public void VerifyAssistantMessageAdapterCreateOptionsWithMetadata()
+ {
+ // Arrange: Setup message with metadata
+ ChatMessageContent message =
+ new(AuthorRole.User, "test")
+ {
+ Metadata =
+ new Dictionary()
+ {
+ { "a", 1 },
+ { "b", "2" },
+ }
+ };
+
+ // Act: Create options
+ MessageCreationOptions options = AssistantMessageFactory.CreateOptions(message);
+
+ // Assert
+ Assert.NotNull(options);
+ Assert.NotEmpty(options.Metadata);
+ Assert.Equal(2, options.Metadata.Count);
+ Assert.Equal("1", options.Metadata["a"]);
+ Assert.Equal("2", options.Metadata["b"]);
+ }
+
+ ///
+ /// Verify options creation.
+ ///
+ [Fact]
+ public void VerifyAssistantMessageAdapterCreateOptionsWithMetadataNull()
+ {
+ // Arrange: Setup message with null metadata value
+ ChatMessageContent message =
+ new(AuthorRole.User, "test")
+ {
+ Metadata =
+ new Dictionary()
+ {
+ { "a", null },
+ { "b", "2" },
+ }
+ };
+
+ // Act: Create options
+ MessageCreationOptions options = AssistantMessageFactory.CreateOptions(message);
+
+ // Assert
+ Assert.NotNull(options);
+ Assert.NotEmpty(options.Metadata);
+ Assert.Equal(2, options.Metadata.Count);
+ Assert.Equal(string.Empty, options.Metadata["a"]);
+ Assert.Equal("2", options.Metadata["b"]);
+ }
+
+ ///
+ /// Verify message content generation for text content.
+ ///
+ [Fact]
+ public void VerifyAssistantMessageAdapterGetMessageContentsWithText()
+ {
+ // Arrange
+ ChatMessageContent message = new(AuthorRole.User, items: [new TextContent("test")]);
+
+ // Act
+ MessageContent[] contents = AssistantMessageFactory.GetMessageContents(message).ToArray();
+
+ // Assert
+ Assert.NotNull(contents);
+ Assert.Single(contents);
+ Assert.NotNull(contents.Single().Text);
+ }
+
+ ///
+ /// Verify message content generation for image content referenced by URL.
+ ///
+ [Fact]
+ public void VerifyAssistantMessageAdapterGetMessageWithImageUrl()
+ {
+ // Arrange
+ ChatMessageContent message = new(AuthorRole.User, items: [new ImageContent(new Uri("https://localhost/myimage.png"))]);
+
+ // Act
+ MessageContent[] contents = AssistantMessageFactory.GetMessageContents(message).ToArray();
+
+ // Assert
+ Assert.NotNull(contents);
+ Assert.Single(contents);
+ Assert.NotNull(contents.Single().ImageUrl);
+ }
+
+ ///
+ /// Verify message content generation for image content provided as binary data.
+ ///
+ [Fact(Skip = "API bug with data Uri construction")]
+ public void VerifyAssistantMessageAdapterGetMessageWithImageData()
+ {
+ // Arrange
+ ChatMessageContent message = new(AuthorRole.User, items: [new ImageContent(new byte[] { 1, 2, 3 }, "image/png")]);
+
+ // Act
+ MessageContent[] contents = AssistantMessageFactory.GetMessageContents(message).ToArray();
+
+ // Assert
+ Assert.NotNull(contents);
+ Assert.Single(contents);
+ Assert.NotNull(contents.Single().ImageUrl);
+ }
+
+ ///
+ /// Verify message content generation for a file reference.
+ ///
+ [Fact]
+ public void VerifyAssistantMessageAdapterGetMessageWithImageFile()
+ {
+ // Arrange
+ ChatMessageContent message = new(AuthorRole.User, items: [new FileReferenceContent("file-id")]);
+
+ // Act
+ MessageContent[] contents = AssistantMessageFactory.GetMessageContents(message).ToArray();
+
+ // Assert
+ Assert.NotNull(contents);
+ Assert.Single(contents);
+ Assert.NotNull(contents.Single().ImageFileId);
+ }
+
+ ///
+ /// Verify message content generation for a message combining text, image, and file content.
+ ///
+ [Fact]
+ public void VerifyAssistantMessageAdapterGetMessageWithAll()
+ {
+ // Arrange
+ ChatMessageContent message =
+ new(
+ AuthorRole.User,
+ items:
+ [
+ new TextContent("test"),
+ new ImageContent(new Uri("https://localhost/myimage.png")),
+ new FileReferenceContent("file-id")
+ ]);
+
+ // Act
+ MessageContent[] contents = AssistantMessageFactory.GetMessageContents(message).ToArray();
+
+ // Assert
+ Assert.NotNull(contents);
+ Assert.Equal(3, contents.Length);
+ }
+}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
new file mode 100644
index 000000000000..d6bcf91b8a94
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/Internal/AssistantRunOptionsFactoryTests.cs
@@ -0,0 +1,139 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
+using OpenAI.Assistants;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI.Internal;
+
+///
+/// Unit testing of .
+///
+public class AssistantRunOptionsFactoryTests
+{
+ ///
+ /// Verify run options generation with null .
+ ///
+ [Fact]
+ public void AssistantRunOptionsFactoryExecutionOptionsNullTest()
+ {
+ // Arrange
+ OpenAIAssistantDefinition definition =
+ new("gpt-anything")
+ {
+ Temperature = 0.5F,
+ };
+
+ // Act
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, null);
+
+ // Assert
+ Assert.NotNull(options);
+ Assert.Null(options.Temperature);
+ Assert.Null(options.NucleusSamplingFactor);
+ Assert.Empty(options.Metadata);
+ }
+
+ ///
+ /// Verify run options generation with equivalent .
+ ///
+ [Fact]
+ public void AssistantRunOptionsFactoryExecutionOptionsEquivalentTest()
+ {
+ // Arrange
+ OpenAIAssistantDefinition definition =
+ new("gpt-anything")
+ {
+ Temperature = 0.5F,
+ };
+
+ OpenAIAssistantInvocationOptions invocationOptions =
+ new()
+ {
+ Temperature = 0.5F,
+ };
+
+ // Act
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, invocationOptions);
+
+ // Assert
+ Assert.NotNull(options);
+ Assert.Null(options.Temperature);
+ Assert.Null(options.NucleusSamplingFactor);
+ }
+
+ ///
+ /// Verify run options generation with override.
+ ///
+ [Fact]
+ public void AssistantRunOptionsFactoryExecutionOptionsOverrideTest()
+ {
+ // Arrange
+ OpenAIAssistantDefinition definition =
+ new("gpt-anything")
+ {
+ Temperature = 0.5F,
+ ExecutionOptions =
+ new()
+ {
+ TruncationMessageCount = 5,
+ },
+ };
+
+ OpenAIAssistantInvocationOptions invocationOptions =
+ new()
+ {
+ Temperature = 0.9F,
+ TruncationMessageCount = 8,
+ EnableJsonResponse = true,
+ };
+
+ // Act
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, invocationOptions);
+
+ // Assert
+ Assert.NotNull(options);
+ Assert.Equal(0.9F, options.Temperature);
+ Assert.Equal(8, options.TruncationStrategy.LastMessages);
+ Assert.Equal(AssistantResponseFormat.JsonObject, options.ResponseFormat);
+ Assert.Null(options.NucleusSamplingFactor);
+ }
+
+ ///
+ /// Verify run options generation with metadata.
+ ///
+ [Fact]
+ public void AssistantRunOptionsFactoryExecutionOptionsMetadataTest()
+ {
+ // Arrange
+ OpenAIAssistantDefinition definition =
+ new("gpt-anything")
+ {
+ Temperature = 0.5F,
+ ExecutionOptions =
+ new()
+ {
+ TruncationMessageCount = 5,
+ },
+ };
+
+ OpenAIAssistantInvocationOptions invocationOptions =
+ new()
+ {
+ Metadata = new Dictionary
+ {
+ { "key1", "value" },
+ { "key2", null! },
+ },
+ };
+
+ // Act
+ RunCreationOptions options = AssistantRunOptionsFactory.GenerateOptions(definition, invocationOptions);
+
+ // Assert
+ Assert.Equal(2, options.Metadata.Count);
+ Assert.Equal("value", options.Metadata["key1"]);
+ Assert.Equal(string.Empty, options.Metadata["key2"]);
+ }
+}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs
index 1d9a9ec9dfcf..ef67c48f1473 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantAgentTests.cs
@@ -4,12 +4,14 @@
using System.Linq;
using System.Net;
using System.Net.Http;
+using System.Text;
+using System.Text.Json;
using System.Threading.Tasks;
-using Azure.AI.OpenAI.Assistants;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Assistants;
using Xunit;
namespace SemanticKernel.Agents.UnitTests.OpenAI;
@@ -30,100 +32,257 @@ public sealed class OpenAIAssistantAgentTests : IDisposable
[Fact]
public async Task VerifyOpenAIAssistantAgentCreationEmptyAsync()
{
- OpenAIAssistantDefinition definition =
- new()
- {
- ModelId = "testmodel",
- };
-
- this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentSimple);
-
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- this._emptyKernel,
- this.CreateTestConfiguration(targetAzure: true, useVersion: true),
- definition);
+ // Arrange
+ OpenAIAssistantDefinition definition = new("testmodel");
- Assert.NotNull(agent);
- Assert.NotNull(agent.Id);
- Assert.Null(agent.Instructions);
- Assert.Null(agent.Name);
- Assert.Null(agent.Description);
- Assert.False(agent.IsDeleted);
+ // Act and Assert
+ await this.VerifyAgentCreationAsync(definition);
}
///
/// Verify the invocation and response of
- /// for an agent with optional properties defined.
+ /// for an agent with name, instructions, and description.
///
[Fact]
public async Task VerifyOpenAIAssistantAgentCreationPropertiesAsync()
{
+ // Arrange
OpenAIAssistantDefinition definition =
- new()
+ new("testmodel")
{
- ModelId = "testmodel",
Name = "testname",
Description = "testdescription",
Instructions = "testinstructions",
};
- this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentFull);
+ // Act and Assert
+ await this.VerifyAgentCreationAsync(definition);
+ }
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- this._emptyKernel,
- this.CreateTestConfiguration(),
- definition);
+ ///
+ /// Verify the invocation and response of
+ /// for an agent with code-interpreter enabled.
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentCreationWithCodeInterpreterAsync()
+ {
+ // Arrange
+ OpenAIAssistantDefinition definition =
+ new("testmodel")
+ {
+ EnableCodeInterpreter = true,
+ };
- Assert.NotNull(agent);
- Assert.NotNull(agent.Id);
- Assert.NotNull(agent.Instructions);
- Assert.NotNull(agent.Name);
- Assert.NotNull(agent.Description);
- Assert.False(agent.IsDeleted);
+ // Act and Assert
+ await this.VerifyAgentCreationAsync(definition);
}
///
/// Verify the invocation and response of
- /// for an agent that has all properties defined..
+ /// for an agent with code-interpreter files.
///
[Fact]
- public async Task VerifyOpenAIAssistantAgentCreationEverythingAsync()
+ public async Task VerifyOpenAIAssistantAgentCreationWithCodeInterpreterFilesAsync()
{
+ // Arrange
OpenAIAssistantDefinition definition =
- new()
+ new("testmodel")
{
- ModelId = "testmodel",
EnableCodeInterpreter = true,
- EnableRetrieval = true,
- FileIds = ["#1", "#2"],
- Metadata = new Dictionary() { { "a", "1" } },
+ CodeInterpreterFileIds = ["file1", "file2"],
};
- this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentWithEverything);
+ // Act and Assert
+ await this.VerifyAgentCreationAsync(definition);
+ }
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- this._emptyKernel,
- this.CreateTestConfiguration(),
- definition);
+ ///
+ /// Verify the invocation and response of
+ /// for an agent with a file-search and no vector-store
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentCreationWithFileSearchAsync()
+ {
+ // Arrange
+ OpenAIAssistantDefinition definition =
+ new("testmodel")
+ {
+ EnableFileSearch = true,
+ };
- Assert.NotNull(agent);
- Assert.Equal(2, agent.Tools.Count);
- Assert.True(agent.Tools.OfType().Any());
- Assert.True(agent.Tools.OfType().Any());
- Assert.NotEmpty(agent.FileIds);
- Assert.NotEmpty(agent.Metadata);
+ // Act and Assert
+ await this.VerifyAgentCreationAsync(definition);
+ }
+
+ ///
+ /// Verify the invocation and response of
+ /// for an agent with a vector-store-id (for file-search).
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentCreationWithVectorStoreAsync()
+ {
+ // Arrange
+ OpenAIAssistantDefinition definition =
+ new("testmodel")
+ {
+ EnableFileSearch = true,
+ VectorStoreId = "#vs1",
+ };
+
+ // Act and Assert
+ await this.VerifyAgentCreationAsync(definition);
+ }
+
+ ///
+ /// Verify the invocation and response of
+ /// for an agent with metadata.
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentCreationWithMetadataAsync()
+ {
+ // Arrange
+ OpenAIAssistantDefinition definition =
+ new("testmodel")
+ {
+ Metadata = new Dictionary()
+ {
+ { "a", "1" },
+ { "b", "2" },
+ },
+ };
+
+ // Act and Assert
+ await this.VerifyAgentCreationAsync(definition);
+ }
+
+ ///
+ /// Verify the invocation and response of
+ /// for an agent with json-response mode enabled.
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentCreationWithJsonResponseAsync()
+ {
+ // Arrange
+ OpenAIAssistantDefinition definition =
+ new("testmodel")
+ {
+ EnableJsonResponse = true,
+ };
+
+ // Act and Assert
+ await this.VerifyAgentCreationAsync(definition);
+ }
+
+ ///
+ /// Verify the invocation and response of
+ /// for an agent with temperature defined.
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentCreationWithTemperatureAsync()
+ {
+ // Arrange
+ OpenAIAssistantDefinition definition =
+ new("testmodel")
+ {
+ Temperature = 2.0F,
+ };
+
+ // Act and Assert
+ await this.VerifyAgentCreationAsync(definition);
+ }
+
+ ///
+ /// Verify the invocation and response of
+ /// for an agent with topP defined.
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentCreationWithTopPAsync()
+ {
+ // Arrange
+ OpenAIAssistantDefinition definition =
+ new("testmodel")
+ {
+ TopP = 2.0F,
+ };
+
+ // Act and Assert
+ await this.VerifyAgentCreationAsync(definition);
+ }
+
+ ///
+ /// Verify the invocation and response of
+ /// for an agent with empty execution settings.
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentCreationWithEmptyExecutionOptionsAsync()
+ {
+ // Arrange
+ OpenAIAssistantDefinition definition =
+ new("testmodel")
+ {
+ ExecutionOptions = new OpenAIAssistantExecutionOptions(),
+ };
+
+ // Act and Assert
+ await this.VerifyAgentCreationAsync(definition);
+ }
+
+ ///
+ /// Verify the invocation and response of
+ /// for an agent with populated execution settings.
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentCreationWithExecutionOptionsAsync()
+ {
+ // Arrange
+ OpenAIAssistantDefinition definition =
+ new("testmodel")
+ {
+ ExecutionOptions =
+ new()
+ {
+ MaxCompletionTokens = 100,
+ ParallelToolCallsEnabled = false,
+ }
+ };
+
+ // Act and Assert
+ await this.VerifyAgentCreationAsync(definition);
+ }
+
+ ///
+ /// Verify the invocation and response of
+ /// for an agent with empty execution settings and metadata.
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentCreationWithEmptyExecutionOptionsAndMetadataAsync()
+ {
+ // Arrange
+ OpenAIAssistantDefinition definition =
+ new("testmodel")
+ {
+ ExecutionOptions = new(),
+ Metadata = new Dictionary()
+ {
+ { "a", "1" },
+ { "b", "2" },
+ },
+ };
+
+ // Act and Assert
+ await this.VerifyAgentCreationAsync(definition);
}
///
/// Verify the invocation and response of .
///
[Fact]
- public async Task VerifyOpenAIAssistantAgentRetrieveAsync()
+ public async Task VerifyOpenAIAssistantAgentRetrievalAsync()
{
- this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentSimple);
+ // Arrange
+ OpenAIAssistantDefinition definition = new("testmodel");
+
+ this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentPayload(definition));
OpenAIAssistantAgent agent =
await OpenAIAssistantAgent.RetrieveAsync(
@@ -131,12 +290,8 @@ await OpenAIAssistantAgent.RetrieveAsync(
this.CreateTestConfiguration(),
"#id");
- Assert.NotNull(agent);
- Assert.NotNull(agent.Id);
- Assert.Null(agent.Instructions);
- Assert.Null(agent.Name);
- Assert.Null(agent.Description);
- Assert.False(agent.IsDeleted);
+ // Act and Assert
+ ValidateAgentDefinition(agent, definition);
}
///
@@ -145,16 +300,50 @@ await OpenAIAssistantAgent.RetrieveAsync(
[Fact]
public async Task VerifyOpenAIAssistantAgentDeleteAsync()
{
+ // Arrange
OpenAIAssistantAgent agent = await this.CreateAgentAsync();
+ // Assert
Assert.False(agent.IsDeleted);
+ // Arrange
this.SetupResponse(HttpStatusCode.OK, ResponseContent.DeleteAgent);
+ // Act
await agent.DeleteAsync();
+ // Assert
Assert.True(agent.IsDeleted);
+ // Act
await agent.DeleteAsync(); // Doesn't throw
+ // Assert
Assert.True(agent.IsDeleted);
+ await Assert.ThrowsAsync(() => agent.AddChatMessageAsync("threadid", new(AuthorRole.User, "test")));
+ await Assert.ThrowsAsync(() => agent.InvokeAsync("threadid").ToArrayAsync().AsTask());
+ }
+
+ ///
+ /// Verify the deletion of agent via .
+ ///
+ [Fact]
+ public async Task VerifyOpenAIAssistantAgentCreateThreadAsync()
+ {
+ // Arrange
+ OpenAIAssistantAgent agent = await this.CreateAgentAsync();
+
+ this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateThread);
+
+ // Act
+ string threadId = await agent.CreateThreadAsync();
+ // Assert
+ Assert.NotNull(threadId);
+
+ // Arrange
+ this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateThread);
+
+ // Act
+ threadId = await agent.CreateThreadAsync(new());
+ // Assert
+ Assert.NotNull(threadId);
}
///
@@ -163,6 +352,7 @@ public async Task VerifyOpenAIAssistantAgentDeleteAsync()
[Fact]
public async Task VerifyOpenAIAssistantAgentChatTextMessageAsync()
{
+ // Arrange
OpenAIAssistantAgent agent = await this.CreateAgentAsync();
this.SetupResponses(
@@ -174,7 +364,11 @@ public async Task VerifyOpenAIAssistantAgentChatTextMessageAsync()
ResponseContent.GetTextMessage);
AgentGroupChat chat = new();
+
+ // Act
ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync();
+
+ // Assert
Assert.Single(messages);
Assert.Single(messages[0].Items);
Assert.IsType(messages[0].Items[0]);
@@ -186,6 +380,7 @@ public async Task VerifyOpenAIAssistantAgentChatTextMessageAsync()
[Fact]
public async Task VerifyOpenAIAssistantAgentChatTextMessageWithAnnotationAsync()
{
+ // Arrange
OpenAIAssistantAgent agent = await this.CreateAgentAsync();
this.SetupResponses(
@@ -197,7 +392,11 @@ public async Task VerifyOpenAIAssistantAgentChatTextMessageWithAnnotationAsync()
ResponseContent.GetTextMessageWithAnnotation);
AgentGroupChat chat = new();
+
+ // Act
ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync();
+
+ // Assert
Assert.Single(messages);
Assert.Equal(2, messages[0].Items.Count);
Assert.NotNull(messages[0].Items.SingleOrDefault(c => c is TextContent));
@@ -210,6 +409,7 @@ public async Task VerifyOpenAIAssistantAgentChatTextMessageWithAnnotationAsync()
[Fact]
public async Task VerifyOpenAIAssistantAgentChatImageMessageAsync()
{
+ // Arrange
OpenAIAssistantAgent agent = await this.CreateAgentAsync();
this.SetupResponses(
@@ -221,7 +421,11 @@ public async Task VerifyOpenAIAssistantAgentChatImageMessageAsync()
ResponseContent.GetImageMessage);
AgentGroupChat chat = new();
+
+ // Act
ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync();
+
+ // Assert
Assert.Single(messages);
Assert.Single(messages[0].Items);
Assert.IsType(messages[0].Items[0]);
@@ -233,7 +437,7 @@ public async Task VerifyOpenAIAssistantAgentChatImageMessageAsync()
[Fact]
public async Task VerifyOpenAIAssistantAgentGetMessagesAsync()
{
- // Create agent
+ // Arrange: Create agent
OpenAIAssistantAgent agent = await this.CreateAgentAsync();
// Initialize agent channel
@@ -246,18 +450,22 @@ public async Task VerifyOpenAIAssistantAgentGetMessagesAsync()
ResponseContent.GetTextMessage);
AgentGroupChat chat = new();
+
+ // Act
ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync();
+ // Assert
Assert.Single(messages);
- // Setup messages
+ // Arrange: Setup messages
this.SetupResponses(
HttpStatusCode.OK,
ResponseContent.ListMessagesPageMore,
ResponseContent.ListMessagesPageMore,
ResponseContent.ListMessagesPageFinal);
- // Get messages and verify
+ // Act: Get messages
messages = await chat.GetChatMessagesAsync(agent).ToArrayAsync();
+ // Assert
Assert.Equal(5, messages.Length);
}
@@ -267,7 +475,7 @@ public async Task VerifyOpenAIAssistantAgentGetMessagesAsync()
[Fact]
public async Task VerifyOpenAIAssistantAgentAddMessagesAsync()
{
- // Create agent
+ // Arrange: Create agent
OpenAIAssistantAgent agent = await this.CreateAgentAsync();
// Initialize agent channel
@@ -279,12 +487,18 @@ public async Task VerifyOpenAIAssistantAgentAddMessagesAsync()
ResponseContent.MessageSteps,
ResponseContent.GetTextMessage);
AgentGroupChat chat = new();
+
+ // Act
ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync();
+ // Assert
Assert.Single(messages);
+ // Arrange
chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, "hi"));
+ // Act
messages = await chat.GetChatMessagesAsync().ToArrayAsync();
+ // Assert
Assert.Equal(2, messages.Length);
}
@@ -294,6 +508,7 @@ public async Task VerifyOpenAIAssistantAgentAddMessagesAsync()
[Fact]
public async Task VerifyOpenAIAssistantAgentListDefinitionAsync()
{
+ // Arrange
OpenAIAssistantAgent agent = await this.CreateAgentAsync();
this.SetupResponses(
@@ -302,20 +517,24 @@ public async Task VerifyOpenAIAssistantAgentListDefinitionAsync()
ResponseContent.ListAgentsPageMore,
ResponseContent.ListAgentsPageFinal);
+ // Act
var messages =
await OpenAIAssistantAgent.ListDefinitionsAsync(
this.CreateTestConfiguration()).ToArrayAsync();
+ // Assert
Assert.Equal(7, messages.Length);
+ // Arrange
this.SetupResponses(
HttpStatusCode.OK,
ResponseContent.ListAgentsPageMore,
- ResponseContent.ListAgentsPageMore);
+ ResponseContent.ListAgentsPageFinal);
+ // Act
messages =
await OpenAIAssistantAgent.ListDefinitionsAsync(
- this.CreateTestConfiguration(),
- maxResults: 4).ToArrayAsync();
+ this.CreateTestConfiguration()).ToArrayAsync();
+ // Assert
Assert.Equal(4, messages.Length);
}
@@ -325,6 +544,7 @@ await OpenAIAssistantAgent.ListDefinitionsAsync(
[Fact]
public async Task VerifyOpenAIAssistantAgentWithFunctionCallAsync()
{
+ // Arrange
OpenAIAssistantAgent agent = await this.CreateAgentAsync();
KernelPlugin plugin = KernelPluginFactory.CreateFromType();
@@ -342,7 +562,11 @@ public async Task VerifyOpenAIAssistantAgentWithFunctionCallAsync()
ResponseContent.GetTextMessage);
AgentGroupChat chat = new();
+
+ // Act
ChatMessageContent[] messages = await chat.InvokeAsync(agent).ToArrayAsync();
+
+ // Assert
Assert.Single(messages);
Assert.Single(messages[0].Items);
Assert.IsType(messages[0].Items[0]);
@@ -365,15 +589,95 @@ public OpenAIAssistantAgentTests()
this._emptyKernel = new Kernel();
}
- private Task CreateAgentAsync()
+ private async Task VerifyAgentCreationAsync(OpenAIAssistantDefinition definition)
{
- OpenAIAssistantDefinition definition =
- new()
+ this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentPayload(definition));
+
+ OpenAIAssistantAgent agent =
+ await OpenAIAssistantAgent.CreateAsync(
+ this._emptyKernel,
+ this.CreateTestConfiguration(),
+ definition);
+
+ ValidateAgentDefinition(agent, definition);
+ }
+
+ private static void ValidateAgentDefinition(OpenAIAssistantAgent agent, OpenAIAssistantDefinition sourceDefinition)
+ {
+ // Verify fundamental state
+ Assert.NotNull(agent);
+ Assert.NotNull(agent.Id);
+ Assert.False(agent.IsDeleted);
+ Assert.NotNull(agent.Definition);
+ Assert.Equal(sourceDefinition.ModelId, agent.Definition.ModelId);
+
+ // Verify core properties
+ Assert.Equal(sourceDefinition.Instructions ?? string.Empty, agent.Instructions);
+ Assert.Equal(sourceDefinition.Name ?? string.Empty, agent.Name);
+ Assert.Equal(sourceDefinition.Description ?? string.Empty, agent.Description);
+
+ // Verify options
+ Assert.Equal(sourceDefinition.Temperature, agent.Definition.Temperature);
+ Assert.Equal(sourceDefinition.TopP, agent.Definition.TopP);
+ Assert.Equal(sourceDefinition.ExecutionOptions?.MaxCompletionTokens, agent.Definition.ExecutionOptions?.MaxCompletionTokens);
+ Assert.Equal(sourceDefinition.ExecutionOptions?.MaxPromptTokens, agent.Definition.ExecutionOptions?.MaxPromptTokens);
+ Assert.Equal(sourceDefinition.ExecutionOptions?.ParallelToolCallsEnabled, agent.Definition.ExecutionOptions?.ParallelToolCallsEnabled);
+ Assert.Equal(sourceDefinition.ExecutionOptions?.TruncationMessageCount, agent.Definition.ExecutionOptions?.TruncationMessageCount);
+
+ // Verify tool definitions
+ int expectedToolCount = 0;
+
+ bool hasCodeInterpreter = false;
+ if (sourceDefinition.EnableCodeInterpreter)
+ {
+ hasCodeInterpreter = true;
+ ++expectedToolCount;
+ }
+
+ Assert.Equal(hasCodeInterpreter, agent.Tools.OfType().Any());
+
+ bool hasFileSearch = false;
+ if (sourceDefinition.EnableFileSearch)
+ {
+ hasFileSearch = true;
+ ++expectedToolCount;
+ }
+
+ Assert.Equal(hasFileSearch, agent.Tools.OfType().Any());
+
+ Assert.Equal(expectedToolCount, agent.Tools.Count);
+
+ // Verify metadata
+ Assert.NotNull(agent.Definition.Metadata);
+ if (sourceDefinition.ExecutionOptions == null)
+ {
+ Assert.Equal(sourceDefinition.Metadata ?? new Dictionary(), agent.Definition.Metadata);
+ }
+ else // Additional metadata present when execution options are defined
+ {
+ Assert.Equal((sourceDefinition.Metadata?.Count ?? 0) + 1, agent.Definition.Metadata.Count);
+
+ if (sourceDefinition.Metadata != null)
{
- ModelId = "testmodel",
- };
+ foreach (var (key, value) in sourceDefinition.Metadata)
+ {
+ string? targetValue = agent.Definition.Metadata[key];
+ Assert.NotNull(targetValue);
+ Assert.Equal(value, targetValue);
+ }
+ }
+ }
+
+ // Verify detail definition
+ Assert.Equal(sourceDefinition.VectorStoreId, agent.Definition.VectorStoreId);
+ Assert.Equal(sourceDefinition.CodeInterpreterFileIds, agent.Definition.CodeInterpreterFileIds);
+ }
- this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentSimple);
+ private Task CreateAgentAsync()
+ {
+ OpenAIAssistantDefinition definition = new("testmodel");
+
+ this.SetupResponse(HttpStatusCode.OK, ResponseContent.CreateAgentPayload(definition));
return
OpenAIAssistantAgent.CreateAsync(
@@ -382,14 +686,10 @@ private Task CreateAgentAsync()
definition);
}
- private OpenAIAssistantConfiguration CreateTestConfiguration(bool targetAzure = false, bool useVersion = false)
- {
- return new(apiKey: "fakekey", endpoint: targetAzure ? "https://localhost" : null)
- {
- HttpClient = this._httpClient,
- Version = useVersion ? AssistantsClientOptions.ServiceVersion.V2024_02_15_Preview : null,
- };
- }
+ private OpenAIClientProvider CreateTestConfiguration(bool targetAzure = false)
+ => targetAzure ?
+ OpenAIClientProvider.ForAzureOpenAI(apiKey: "fakekey", endpoint: new Uri("https://localhost"), this._httpClient) :
+ OpenAIClientProvider.ForOpenAI(apiKey: "fakekey", endpoint: null, this._httpClient);
private void SetupResponse(HttpStatusCode statusCode, string content)
{
@@ -423,58 +723,114 @@ public void MyFunction(int index)
private static class ResponseContent
{
- public const string CreateAgentSimple =
- """
+ public static string CreateAgentPayload(OpenAIAssistantDefinition definition)
+ {
+ StringBuilder builder = new();
+ builder.AppendLine("{");
+ builder.AppendLine(@" ""id"": ""asst_abc123"",");
+ builder.AppendLine(@" ""object"": ""assistant"",");
+ builder.AppendLine(@" ""created_at"": 1698984975,");
+ builder.AppendLine(@$" ""name"": ""{definition.Name}"",");
+ builder.AppendLine(@$" ""description"": ""{definition.Description}"",");
+ builder.AppendLine(@$" ""instructions"": ""{definition.Instructions}"",");
+ builder.AppendLine(@$" ""model"": ""{definition.ModelId}"",");
+
+ bool hasCodeInterpreter = definition.EnableCodeInterpreter;
+ bool hasCodeInterpreterFiles = (definition.CodeInterpreterFileIds?.Count ?? 0) > 0;
+ bool hasFileSearch = definition.EnableFileSearch;
+ if (!hasCodeInterpreter && !hasFileSearch)
{
- "id": "asst_abc123",
- "object": "assistant",
- "created_at": 1698984975,
- "name": null,
- "description": null,
- "model": "gpt-4-turbo",
- "instructions": null,
- "tools": [],
- "file_ids": [],
- "metadata": {}
+ builder.AppendLine(@" ""tools"": [],");
}
- """;
+ else
+ {
+ builder.AppendLine(@" ""tools"": [");
- public const string CreateAgentFull =
- """
+ if (hasCodeInterpreter)
+ {
+ builder.Append(@$" {{ ""type"": ""code_interpreter"" }}{(hasFileSearch ? "," : string.Empty)}");
+ }
+
+ if (hasFileSearch)
+ {
+ builder.AppendLine(@" { ""type"": ""file_search"" }");
+ }
+
+ builder.AppendLine(" ],");
+ }
+
+ if (!hasCodeInterpreterFiles && !hasFileSearch)
{
- "id": "asst_abc123",
- "object": "assistant",
- "created_at": 1698984975,
- "name": "testname",
- "description": "testdescription",
- "model": "gpt-4-turbo",
- "instructions": "testinstructions",
- "tools": [],
- "file_ids": [],
- "metadata": {}
+ builder.AppendLine(@" ""tool_resources"": {},");
}
- """;
+ else
+ {
+ builder.AppendLine(@" ""tool_resources"": {");
- public const string CreateAgentWithEverything =
- """
+ if (hasCodeInterpreterFiles)
+ {
+ string fileIds = string.Join(",", definition.CodeInterpreterFileIds!.Select(fileId => "\"" + fileId + "\""));
+ builder.AppendLine(@$" ""code_interpreter"": {{ ""file_ids"": [{fileIds}] }}{(hasFileSearch ? "," : string.Empty)}");
+ }
+
+ if (hasFileSearch)
+ {
+ builder.AppendLine(@$" ""file_search"": {{ ""vector_store_ids"": [""{definition.VectorStoreId}""] }}");
+ }
+
+ builder.AppendLine(" },");
+ }
+
+ if (definition.Temperature.HasValue)
{
- "id": "asst_abc123",
- "object": "assistant",
- "created_at": 1698984975,
- "name": null,
- "description": null,
- "model": "gpt-4-turbo",
- "instructions": null,
- "tools": [
+ builder.AppendLine(@$" ""temperature"": {definition.Temperature},");
+ }
+
+ if (definition.TopP.HasValue)
+ {
+ builder.AppendLine(@$" ""top_p"": {definition.TopP},");
+ }
+
+ bool hasExecutionOptions = definition.ExecutionOptions != null;
+ int metadataCount = (definition.Metadata?.Count ?? 0);
+ if (metadataCount == 0 && !hasExecutionOptions)
+ {
+ builder.AppendLine(@" ""metadata"": {}");
+ }
+ else
+ {
+ int index = 0;
+ builder.AppendLine(@" ""metadata"": {");
+
+ if (hasExecutionOptions)
{
- "type": "code_interpreter"
- },
+ string serializedExecutionOptions = JsonSerializer.Serialize(definition.ExecutionOptions);
+ builder.AppendLine(@$" ""{OpenAIAssistantAgent.OptionsMetadataKey}"": ""{JsonEncodedText.Encode(serializedExecutionOptions)}""{(metadataCount > 0 ? "," : string.Empty)}");
+ }
+
+ if (metadataCount > 0)
{
- "type": "retrieval"
+ foreach (var (key, value) in definition.Metadata!)
+ {
+ builder.AppendLine(@$" ""{key}"": ""{value}""{(index < metadataCount - 1 ? "," : string.Empty)}");
+ ++index;
+ }
}
- ],
- "file_ids": ["#1", "#2"],
- "metadata": {"a": "1"}
+
+ builder.AppendLine(" }");
+ }
+
+ builder.AppendLine("}");
+
+ return builder.ToString();
+ }
+
+ public const string CreateAgentWithEverything =
+ """
+ {
+ "tool_resources": {
+ "file_search": { "vector_store_ids": ["#vs"] }
+ },
}
""";
@@ -748,7 +1104,6 @@ private static class ResponseContent
"model": "gpt-4-turbo",
"instructions": "You are a helpful assistant designed to make me better at coding!",
"tools": [],
- "file_ids": [],
"metadata": {}
},
{
@@ -760,7 +1115,6 @@ private static class ResponseContent
"model": "gpt-4-turbo",
"instructions": "You are a helpful assistant designed to make me better at coding!",
"tools": [],
- "file_ids": [],
"metadata": {}
},
{
@@ -772,7 +1126,6 @@ private static class ResponseContent
"model": "gpt-4-turbo",
"instructions": null,
"tools": [],
- "file_ids": [],
"metadata": {}
}
],
@@ -796,7 +1149,6 @@ private static class ResponseContent
"model": "gpt-4-turbo",
"instructions": "You are a helpful assistant designed to make me better at coding!",
"tools": [],
- "file_ids": [],
"metadata": {}
}
],
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantConfigurationTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantConfigurationTests.cs
deleted file mode 100644
index 3708ab50ab97..000000000000
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantConfigurationTests.cs
+++ /dev/null
@@ -1,61 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using System;
-using System.Net.Http;
-using Azure.AI.OpenAI.Assistants;
-using Microsoft.SemanticKernel.Agents.OpenAI;
-using Xunit;
-
-namespace SemanticKernel.Agents.UnitTests.OpenAI;
-
-///
-/// Unit testing of .
-///
-public class OpenAIAssistantConfigurationTests
-{
- ///
- /// Verify initial state.
- ///
- [Fact]
- public void VerifyOpenAIAssistantConfigurationInitialState()
- {
- OpenAIAssistantConfiguration config = new(apiKey: "testkey");
-
- Assert.Equal("testkey", config.ApiKey);
- Assert.Null(config.Endpoint);
- Assert.Null(config.HttpClient);
- Assert.Null(config.Version);
- }
-
- ///
- /// Verify assignment.
- ///
- [Fact]
- public void VerifyOpenAIAssistantConfigurationAssignment()
- {
- using HttpClient client = new();
-
- OpenAIAssistantConfiguration config =
- new(apiKey: "testkey", endpoint: "https://localhost")
- {
- HttpClient = client,
- Version = AssistantsClientOptions.ServiceVersion.V2024_02_15_Preview,
- };
-
- Assert.Equal("testkey", config.ApiKey);
- Assert.Equal("https://localhost", config.Endpoint);
- Assert.NotNull(config.HttpClient);
- Assert.Equal(AssistantsClientOptions.ServiceVersion.V2024_02_15_Preview, config.Version);
- }
-
- ///
- /// Verify secure endpoint.
- ///
- [Fact]
- public void VerifyOpenAIAssistantConfigurationThrows()
- {
- using HttpClient client = new();
-
- Assert.Throws(
- () => new OpenAIAssistantConfiguration(apiKey: "testkey", endpoint: "http://localhost"));
- }
-}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs
index b17b61211c18..f8547f375f13 100644
--- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantDefinitionTests.cs
@@ -1,5 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
using System.Collections.Generic;
+using System.Text.Json;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Xunit;
@@ -16,17 +17,27 @@ public class OpenAIAssistantDefinitionTests
[Fact]
public void VerifyOpenAIAssistantDefinitionInitialState()
{
- OpenAIAssistantDefinition definition = new();
+ // Arrange
+ OpenAIAssistantDefinition definition = new("testmodel");
- Assert.Null(definition.Id);
+ // Assert
+ Assert.Equal(string.Empty, definition.Id);
+ Assert.Equal("testmodel", definition.ModelId);
Assert.Null(definition.Name);
- Assert.Null(definition.ModelId);
Assert.Null(definition.Instructions);
Assert.Null(definition.Description);
Assert.Null(definition.Metadata);
- Assert.Null(definition.FileIds);
+ Assert.Null(definition.ExecutionOptions);
+ Assert.Null(definition.Temperature);
+ Assert.Null(definition.TopP);
+ Assert.False(definition.EnableFileSearch);
+ Assert.Null(definition.VectorStoreId);
+ Assert.Null(definition.CodeInterpreterFileIds);
Assert.False(definition.EnableCodeInterpreter);
- Assert.False(definition.EnableRetrieval);
+ Assert.False(definition.EnableJsonResponse);
+
+ // Act and Assert
+ ValidateSerialization(definition);
}
///
@@ -35,28 +46,80 @@ public void VerifyOpenAIAssistantDefinitionInitialState()
[Fact]
public void VerifyOpenAIAssistantDefinitionAssignment()
{
+ // Arrange
OpenAIAssistantDefinition definition =
- new()
+ new("testmodel")
{
Id = "testid",
Name = "testname",
- ModelId = "testmodel",
Instructions = "testinstructions",
Description = "testdescription",
- FileIds = ["id"],
+ EnableFileSearch = true,
+ VectorStoreId = "#vs",
Metadata = new Dictionary() { { "a", "1" } },
+ Temperature = 2,
+ TopP = 0,
+ ExecutionOptions =
+ new()
+ {
+ MaxCompletionTokens = 1000,
+ MaxPromptTokens = 1000,
+ ParallelToolCallsEnabled = false,
+ TruncationMessageCount = 12,
+ },
+ CodeInterpreterFileIds = ["file1"],
EnableCodeInterpreter = true,
- EnableRetrieval = true,
+ EnableJsonResponse = true,
};
+ // Assert
Assert.Equal("testid", definition.Id);
Assert.Equal("testname", definition.Name);
Assert.Equal("testmodel", definition.ModelId);
Assert.Equal("testinstructions", definition.Instructions);
Assert.Equal("testdescription", definition.Description);
+ Assert.True(definition.EnableFileSearch);
+ Assert.Equal("#vs", definition.VectorStoreId);
+ Assert.Equal(2, definition.Temperature);
+ Assert.Equal(0, definition.TopP);
+ Assert.NotNull(definition.ExecutionOptions);
+ Assert.Equal(1000, definition.ExecutionOptions.MaxCompletionTokens);
+ Assert.Equal(1000, definition.ExecutionOptions.MaxPromptTokens);
+ Assert.Equal(12, definition.ExecutionOptions.TruncationMessageCount);
+ Assert.False(definition.ExecutionOptions.ParallelToolCallsEnabled);
Assert.Single(definition.Metadata);
- Assert.Single(definition.FileIds);
+ Assert.Single(definition.CodeInterpreterFileIds);
Assert.True(definition.EnableCodeInterpreter);
- Assert.True(definition.EnableRetrieval);
+ Assert.True(definition.EnableJsonResponse);
+
+ // Act and Assert
+ ValidateSerialization(definition);
+ }
+
+ private static void ValidateSerialization(OpenAIAssistantDefinition source)
+ {
+ string json = JsonSerializer.Serialize(source);
+
+ OpenAIAssistantDefinition? target = JsonSerializer.Deserialize(json);
+
+ Assert.NotNull(target);
+ Assert.Equal(source.Id, target.Id);
+ Assert.Equal(source.Name, target.Name);
+ Assert.Equal(source.ModelId, target.ModelId);
+ Assert.Equal(source.Instructions, target.Instructions);
+ Assert.Equal(source.Description, target.Description);
+ Assert.Equal(source.EnableFileSearch, target.EnableFileSearch);
+ Assert.Equal(source.VectorStoreId, target.VectorStoreId);
+ Assert.Equal(source.Temperature, target.Temperature);
+ Assert.Equal(source.TopP, target.TopP);
+ Assert.Equal(source.EnableFileSearch, target.EnableFileSearch);
+ Assert.Equal(source.VectorStoreId, target.VectorStoreId);
+ Assert.Equal(source.EnableCodeInterpreter, target.EnableCodeInterpreter);
+ Assert.Equal(source.ExecutionOptions?.MaxCompletionTokens, target.ExecutionOptions?.MaxCompletionTokens);
+ Assert.Equal(source.ExecutionOptions?.MaxPromptTokens, target.ExecutionOptions?.MaxPromptTokens);
+ Assert.Equal(source.ExecutionOptions?.TruncationMessageCount, target.ExecutionOptions?.TruncationMessageCount);
+ Assert.Equal(source.ExecutionOptions?.ParallelToolCallsEnabled, target.ExecutionOptions?.ParallelToolCallsEnabled);
+ AssertCollection.Equal(source.CodeInterpreterFileIds, target.CodeInterpreterFileIds);
+ AssertCollection.Equal(source.Metadata, target.Metadata);
}
}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs
new file mode 100644
index 000000000000..99cbe012f183
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIAssistantInvocationOptionsTests.cs
@@ -0,0 +1,100 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Text.Json;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI;
+
+///
+/// Unit testing of .
+///
+public class OpenAIAssistantInvocationOptionsTests
+{
+ ///
+ /// Verify initial state.
+ ///
+ [Fact]
+ public void OpenAIAssistantInvocationOptionsInitialState()
+ {
+ // Arrange
+ OpenAIAssistantInvocationOptions options = new();
+
+ // Assert
+ Assert.Null(options.ModelName);
+ Assert.Null(options.Metadata);
+ Assert.Null(options.Temperature);
+ Assert.Null(options.TopP);
+ Assert.Null(options.ParallelToolCallsEnabled);
+ Assert.Null(options.MaxCompletionTokens);
+ Assert.Null(options.MaxPromptTokens);
+ Assert.Null(options.TruncationMessageCount);
+ Assert.Null(options.EnableJsonResponse);
+ Assert.False(options.EnableCodeInterpreter);
+ Assert.False(options.EnableFileSearch);
+
+ // Act and Assert
+ ValidateSerialization(options);
+ }
+
+ ///
+ /// Verify initialization.
+ ///
+ [Fact]
+ public void OpenAIAssistantInvocationOptionsAssignment()
+ {
+ // Arrange
+ OpenAIAssistantInvocationOptions options =
+ new()
+ {
+ ModelName = "testmodel",
+ Metadata = new Dictionary() { { "a", "1" } },
+ MaxCompletionTokens = 1000,
+ MaxPromptTokens = 1000,
+ ParallelToolCallsEnabled = false,
+ TruncationMessageCount = 12,
+ Temperature = 2,
+ TopP = 0,
+ EnableCodeInterpreter = true,
+ EnableJsonResponse = true,
+ EnableFileSearch = true,
+ };
+
+ // Assert
+ Assert.Equal("testmodel", options.ModelName);
+ Assert.Equal(2, options.Temperature);
+ Assert.Equal(0, options.TopP);
+ Assert.Equal(1000, options.MaxCompletionTokens);
+ Assert.Equal(1000, options.MaxPromptTokens);
+ Assert.Equal(12, options.TruncationMessageCount);
+ Assert.False(options.ParallelToolCallsEnabled);
+ Assert.Single(options.Metadata);
+ Assert.True(options.EnableCodeInterpreter);
+ Assert.True(options.EnableJsonResponse);
+ Assert.True(options.EnableFileSearch);
+
+ // Act and Assert
+ ValidateSerialization(options);
+ }
+
+ private static void ValidateSerialization(OpenAIAssistantInvocationOptions source)
+ {
+ // Act
+ string json = JsonSerializer.Serialize(source);
+
+ OpenAIAssistantInvocationOptions? target = JsonSerializer.Deserialize(json);
+
+ // Assert
+ Assert.NotNull(target);
+ Assert.Equal(source.ModelName, target.ModelName);
+ Assert.Equal(source.Temperature, target.Temperature);
+ Assert.Equal(source.TopP, target.TopP);
+ Assert.Equal(source.MaxCompletionTokens, target.MaxCompletionTokens);
+ Assert.Equal(source.MaxPromptTokens, target.MaxPromptTokens);
+ Assert.Equal(source.TruncationMessageCount, target.TruncationMessageCount);
+ Assert.Equal(source.EnableCodeInterpreter, target.EnableCodeInterpreter);
+ Assert.Equal(source.EnableJsonResponse, target.EnableJsonResponse);
+ Assert.Equal(source.EnableFileSearch, target.EnableFileSearch);
+ AssertCollection.Equal(source.Metadata, target.Metadata);
+ }
+}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs
new file mode 100644
index 000000000000..7799eb26c305
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIClientProviderTests.cs
@@ -0,0 +1,86 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.Net.Http;
+using Azure.Core;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Moq;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI;
+
+///
+/// Unit testing of .
+///
+public class OpenAIClientProviderTests
+{
+ ///
+ /// Verify the provisioning of a client for Azure OpenAI using an API key.
+ ///
+ [Fact]
+ public void VerifyOpenAIClientFactoryTargetAzureByKey()
+ {
+ // Arrange
+ OpenAIClientProvider provider = OpenAIClientProvider.ForAzureOpenAI("key", new Uri("https://localhost"));
+
+ // Assert
+ Assert.NotNull(provider.Client);
+ }
+
+ ///
+ /// Verify the provisioning of a client for Azure OpenAI using a token credential.
+ ///
+ [Fact]
+ public void VerifyOpenAIClientFactoryTargetAzureByCredential()
+ {
+ // Arrange
+ Mock mockCredential = new();
+ OpenAIClientProvider provider = OpenAIClientProvider.ForAzureOpenAI(mockCredential.Object, new Uri("https://localhost"));
+
+ // Assert
+ Assert.NotNull(provider.Client);
+ }
+
+ ///
+ /// Verify the provisioning of a client for OpenAI without an API key.
+ ///
+ [Theory]
+ [InlineData(null)]
+ [InlineData("http://myproxy:9819")]
+ public void VerifyOpenAIClientFactoryTargetOpenAINoKey(string? endpoint)
+ {
+ // Arrange
+ OpenAIClientProvider provider = OpenAIClientProvider.ForOpenAI(endpoint != null ? new Uri(endpoint) : null);
+
+ // Assert
+ Assert.NotNull(provider.Client);
+ }
+
+ ///
+ /// Verify the provisioning of a client for OpenAI using an API key.
+ ///
+ [Theory]
+ [InlineData("key", null)]
+ [InlineData("key", "http://myproxy:9819")]
+ public void VerifyOpenAIClientFactoryTargetOpenAIByKey(string key, string? endpoint)
+ {
+ // Arrange
+ OpenAIClientProvider provider = OpenAIClientProvider.ForOpenAI(key, endpoint != null ? new Uri(endpoint) : null);
+
+ // Assert
+ Assert.NotNull(provider.Client);
+ }
+
+ ///
+ /// Verify that the factory can create a client with an HTTP proxy.
+ ///
+ [Fact]
+ public void VerifyOpenAIClientFactoryWithHttpClient()
+ {
+ // Arrange
+ using HttpClient httpClient = new() { BaseAddress = new Uri("http://myproxy:9819") };
+ OpenAIClientProvider provider = OpenAIClientProvider.ForOpenAI(httpClient: httpClient);
+
+ // Assert
+ Assert.NotNull(provider.Client);
+ }
+}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs
new file mode 100644
index 000000000000..1689bec1f828
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIThreadCreationOptionsTests.cs
@@ -0,0 +1,75 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Collections.Generic;
+using System.Text.Json;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI;
+
+///
+/// Unit testing of .
+///
+public class OpenAIThreadCreationOptionsTests
+{
+ ///
+ /// Verify initial state.
+ ///
+ [Fact]
+ public void OpenAIThreadCreationOptionsInitialState()
+ {
+ // Arrange
+ OpenAIThreadCreationOptions options = new();
+
+ // Assert
+ Assert.Null(options.Messages);
+ Assert.Null(options.Metadata);
+ Assert.Null(options.VectorStoreId);
+ Assert.Null(options.CodeInterpreterFileIds);
+
+ // Act and Assert
+ ValidateSerialization(options);
+ }
+
+ ///
+ /// Verify initialization.
+ ///
+ [Fact]
+ public void OpenAIThreadCreationOptionsAssignment()
+ {
+ // Arrange
+ OpenAIThreadCreationOptions options =
+ new()
+ {
+ Messages = [new ChatMessageContent(AuthorRole.User, "test")],
+ VectorStoreId = "#vs",
+ Metadata = new Dictionary() { { "a", "1" } },
+ CodeInterpreterFileIds = ["file1"],
+ };
+
+ // Assert
+ Assert.Single(options.Messages);
+ Assert.Single(options.Metadata);
+ Assert.Equal("#vs", options.VectorStoreId);
+ Assert.Single(options.CodeInterpreterFileIds);
+
+ // Act and Assert
+ ValidateSerialization(options);
+ }
+
+ private static void ValidateSerialization(OpenAIThreadCreationOptions source)
+ {
+ // Act
+ string json = JsonSerializer.Serialize(source);
+
+ OpenAIThreadCreationOptions? target = JsonSerializer.Deserialize(json);
+
+ // Assert
+ Assert.NotNull(target);
+ Assert.Equal(source.VectorStoreId, target.VectorStoreId);
+ AssertCollection.Equal(source.CodeInterpreterFileIds, target.CodeInterpreterFileIds);
+ AssertCollection.Equal(source.Messages, target.Messages, m => m.Items.Count); // ChatMessageContent already validated for deep serialization
+ AssertCollection.Equal(source.Metadata, target.Metadata);
+ }
+}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/RunPollingOptionsTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/RunPollingOptionsTests.cs
new file mode 100644
index 000000000000..e75a962dfc5e
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/RunPollingOptionsTests.cs
@@ -0,0 +1,71 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI;
+
+///
+/// Unit testing of .
+///
+public class RunPollingOptionsTests
+{
+ ///
+ /// Verify initial state.
+ ///
+ [Fact]
+ public void RunPollingOptionsInitialStateTest()
+ {
+ // Arrange
+ RunPollingOptions options = new();
+
+ // Assert
+ Assert.Equal(RunPollingOptions.DefaultPollingInterval, options.RunPollingInterval);
+ Assert.Equal(RunPollingOptions.DefaultPollingBackoff, options.RunPollingBackoff);
+ Assert.Equal(RunPollingOptions.DefaultMessageSynchronizationDelay, options.MessageSynchronizationDelay);
+ Assert.Equal(RunPollingOptions.DefaultPollingBackoffThreshold, options.RunPollingBackoffThreshold);
+ }
+
+ ///
+ /// Verify initialization.
+ ///
+ [Fact]
+ public void RunPollingOptionsAssignmentTest()
+ {
+ // Arrange
+ RunPollingOptions options =
+ new()
+ {
+ RunPollingInterval = TimeSpan.FromSeconds(3),
+ RunPollingBackoff = TimeSpan.FromSeconds(4),
+ RunPollingBackoffThreshold = 8,
+ MessageSynchronizationDelay = TimeSpan.FromSeconds(5),
+ };
+
+ // Assert
+ Assert.Equal(3, options.RunPollingInterval.TotalSeconds);
+ Assert.Equal(4, options.RunPollingBackoff.TotalSeconds);
+ Assert.Equal(5, options.MessageSynchronizationDelay.TotalSeconds);
+ Assert.Equal(8, options.RunPollingBackoffThreshold);
+ }
+
+ ///
+ /// Verify polling interval computation, including backoff past the threshold.
+ ///
+ [Fact]
+ public void RunPollingOptionsGetIntervalTest()
+ {
+ // Arrange
+ RunPollingOptions options =
+ new()
+ {
+ RunPollingInterval = TimeSpan.FromSeconds(3),
+ RunPollingBackoff = TimeSpan.FromSeconds(4),
+ RunPollingBackoffThreshold = 8,
+ };
+
+ // Assert
+ Assert.Equal(options.RunPollingInterval, options.GetPollingInterval(8));
+ Assert.Equal(options.RunPollingBackoff, options.GetPollingInterval(9));
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/.editorconfig b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/.editorconfig
new file mode 100644
index 000000000000..394eef685f21
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/.editorconfig
@@ -0,0 +1,6 @@
+# Suppressing errors for Test projects under dotnet folder
+[*.cs]
+dotnet_diagnostic.CA2007.severity = none # Do not directly await a Task
+dotnet_diagnostic.VSTHRD111.severity = none # Use .ConfigureAwait(bool) is hidden by default, set to none to prevent IDE from changing on autosave
+dotnet_diagnostic.CS1591.severity = none # Missing XML comment for publicly visible type or member
+dotnet_diagnostic.IDE1006.severity = warning # Naming rule violations
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs
new file mode 100644
index 000000000000..31a7654fcfc6
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/AzureOpenAITestHelper.cs
@@ -0,0 +1,30 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.IO;
+using System.Net.Http;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests;
+
+///
+/// Helper for AzureOpenAI test purposes.
+///
+internal static class AzureOpenAITestHelper
+{
+ ///
+ /// Reads test response from file for mocking purposes.
+ ///
+ /// Name of the file with test response.
+ internal static string GetTestResponse(string fileName)
+ {
+ return File.ReadAllText($"./TestData/{fileName}");
+ }
+
+ ///
+ /// Reads test response from file and create .
+ ///
+ /// Name of the file with test response.
+ internal static StreamContent GetTestResponseAsStream(string fileName)
+ {
+ return new StreamContent(File.OpenRead($"./TestData/{fileName}"));
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Connectors.AzureOpenAI.UnitTests.csproj b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Connectors.AzureOpenAI.UnitTests.csproj
new file mode 100644
index 000000000000..a0a695a6719c
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Connectors.AzureOpenAI.UnitTests.csproj
@@ -0,0 +1,47 @@
+
+
+
+
+ SemanticKernel.Connectors.AzureOpenAI.UnitTests
+ $(AssemblyName)
+ net8.0
+ true
+ enable
+ false
+ $(NoWarn);SKEXP0001;SKEXP0010;CA2007,CA1806,CA1869,CA1861,IDE0300,VSTHRD111,IDE1006
+
+
+
+
+
+
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+ runtime; build; native; contentfiles; analyzers; buildtransitive
+ all
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ Always
+
+
+
+
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIKernelBuilderExtensionsTests.cs
new file mode 100644
index 000000000000..d8e8cdac1658
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIKernelBuilderExtensionsTests.cs
@@ -0,0 +1,189 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using Azure.AI.OpenAI;
+using Azure.Core;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.AudioToText;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using Microsoft.SemanticKernel.Embeddings;
+using Microsoft.SemanticKernel.TextGeneration;
+using Microsoft.SemanticKernel.TextToAudio;
+using Microsoft.SemanticKernel.TextToImage;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Extensions;
+
+///
+/// Unit tests for the kernel builder extensions in the class.
+///
+public sealed class AzureOpenAIKernelBuilderExtensionsTests
+{
+ #region Chat completion
+
+ [Theory]
+ [InlineData(InitializationType.ApiKey)]
+ [InlineData(InitializationType.TokenCredential)]
+ [InlineData(InitializationType.ClientInline)]
+ [InlineData(InitializationType.ClientInServiceProvider)]
+ public void KernelBuilderAddAzureOpenAIChatCompletionAddsValidService(InitializationType type)
+ {
+ // Arrange
+ var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken());
+ var client = new AzureOpenAIClient(new Uri("http://localhost"), "key");
+ var builder = Kernel.CreateBuilder();
+
+ builder.Services.AddSingleton(client);
+
+ // Act
+ builder = type switch
+ {
+ InitializationType.ApiKey => builder.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", "api-key"),
+ InitializationType.TokenCredential => builder.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", credentials),
+ InitializationType.ClientInline => builder.AddAzureOpenAIChatCompletion("deployment-name", client),
+ InitializationType.ClientInServiceProvider => builder.AddAzureOpenAIChatCompletion("deployment-name"),
+ _ => builder
+ };
+
+ // Assert
+ var chatCompletionService = builder.Build().GetRequiredService();
+ Assert.True(chatCompletionService is AzureOpenAIChatCompletionService);
+
+ var textGenerationService = builder.Build().GetRequiredService();
+ Assert.True(textGenerationService is AzureOpenAIChatCompletionService);
+ }
+
+ #endregion
+
+ #region Text embeddings
+
+ [Theory]
+ [InlineData(InitializationType.ApiKey)]
+ [InlineData(InitializationType.TokenCredential)]
+ [InlineData(InitializationType.ClientInline)]
+ [InlineData(InitializationType.ClientInServiceProvider)]
+ public void KernelBuilderAddAzureOpenAITextEmbeddingGenerationAddsValidService(InitializationType type)
+ {
+ // Arrange
+ var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken());
+ var client = new AzureOpenAIClient(new Uri("http://localhost"), "key");
+ var builder = Kernel.CreateBuilder();
+
+ builder.Services.AddSingleton(client);
+
+ // Act
+ builder = type switch
+ {
+ InitializationType.ApiKey => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", "api-key"),
+ InitializationType.TokenCredential => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", credentials),
+ InitializationType.ClientInline => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name", client),
+ InitializationType.ClientInServiceProvider => builder.AddAzureOpenAITextEmbeddingGeneration("deployment-name"),
+ _ => builder
+ };
+
+ // Assert
+ var service = builder.Build().GetRequiredService();
+
+ Assert.NotNull(service);
+ Assert.True(service is AzureOpenAITextEmbeddingGenerationService);
+ }
+
+ #endregion
+
+ #region Text to audio
+
+ [Fact]
+ public void KernelBuilderAddAzureOpenAITextToAudioAddsValidService()
+ {
+ // Arrange
+ var sut = Kernel.CreateBuilder();
+
+ // Act
+ var service = sut.AddAzureOpenAITextToAudio("deployment-name", "https://endpoint", "api-key")
+ .Build()
+ .GetRequiredService();
+
+ // Assert
+ Assert.IsType(service);
+ }
+
+ #endregion
+
+ #region Text to image
+
+ [Theory]
+ [InlineData(InitializationType.ApiKey)]
+ [InlineData(InitializationType.TokenCredential)]
+ [InlineData(InitializationType.ClientInline)]
+ [InlineData(InitializationType.ClientInServiceProvider)]
+ public void KernelBuilderExtensionsAddAzureOpenAITextToImageService(InitializationType type)
+ {
+ // Arrange
+ var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken());
+ var client = new AzureOpenAIClient(new Uri("http://localhost"), "key");
+ var builder = Kernel.CreateBuilder();
+
+ builder.Services.AddSingleton(client);
+
+ // Act
+ builder = type switch
+ {
+ InitializationType.ApiKey => builder.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", "api-key"),
+ InitializationType.TokenCredential => builder.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", credentials),
+ InitializationType.ClientInline => builder.AddAzureOpenAITextToImage("deployment-name", client),
+ InitializationType.ClientInServiceProvider => builder.AddAzureOpenAITextToImage("deployment-name"),
+ _ => builder
+ };
+
+ // Assert
+ var service = builder.Build().GetRequiredService();
+
+ Assert.True(service is AzureOpenAITextToImageService);
+ }
+
+ #endregion
+
+ #region Audio to text
+
+ [Theory]
+ [InlineData(InitializationType.ApiKey)]
+ [InlineData(InitializationType.TokenCredential)]
+ [InlineData(InitializationType.ClientInline)]
+ [InlineData(InitializationType.ClientInServiceProvider)]
+ public void KernelBuilderAddAzureOpenAIAudioToTextAddsValidService(InitializationType type)
+ {
+ // Arrange
+ var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken());
+ var client = new AzureOpenAIClient(new Uri("https://endpoint"), "key");
+ var builder = Kernel.CreateBuilder();
+
+ builder.Services.AddSingleton(client);
+
+ // Act
+ builder = type switch
+ {
+ InitializationType.ApiKey => builder.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", "api-key"),
+ InitializationType.TokenCredential => builder.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", credentials),
+ InitializationType.ClientInline => builder.AddAzureOpenAIAudioToText("deployment-name", client),
+ InitializationType.ClientInServiceProvider => builder.AddAzureOpenAIAudioToText("deployment-name"),
+ _ => builder
+ };
+
+ // Assert
+ var service = builder.Build().GetRequiredService();
+
+ Assert.IsType(service);
+ }
+
+ #endregion
+
+ public enum InitializationType
+ {
+ ApiKey,
+ TokenCredential,
+ ClientInline,
+ ClientInServiceProvider,
+ ClientEndpoint,
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIServiceCollectionExtensionsTests.cs
new file mode 100644
index 000000000000..2def01271aa6
--- /dev/null
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Extensions/AzureOpenAIServiceCollectionExtensionsTests.cs
@@ -0,0 +1,189 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using Azure.AI.OpenAI;
+using Azure.Core;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.AudioToText;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
+using Microsoft.SemanticKernel.Embeddings;
+using Microsoft.SemanticKernel.TextGeneration;
+using Microsoft.SemanticKernel.TextToAudio;
+using Microsoft.SemanticKernel.TextToImage;
+
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Extensions;
+
+///
+/// Unit tests for the service collection extensions in the class.
+///
+public sealed class AzureOpenAIServiceCollectionExtensionsTests
+{
+ #region Chat completion
+
+ [Theory]
+ [InlineData(InitializationType.ApiKey)]
+ [InlineData(InitializationType.TokenCredential)]
+ [InlineData(InitializationType.ClientInline)]
+ [InlineData(InitializationType.ClientInServiceProvider)]
+ public void ServiceCollectionAddAzureOpenAIChatCompletionAddsValidService(InitializationType type)
+ {
+ // Arrange
+ var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken());
+ var client = new AzureOpenAIClient(new Uri("http://localhost"), "key");
+ var builder = Kernel.CreateBuilder();
+
+ builder.Services.AddSingleton(client);
+
+ // Act
+ IServiceCollection collection = type switch
+ {
+ InitializationType.ApiKey => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", "api-key"),
+ InitializationType.TokenCredential => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", "https://endpoint", credentials),
+ InitializationType.ClientInline => builder.Services.AddAzureOpenAIChatCompletion("deployment-name", client),
+ InitializationType.ClientInServiceProvider => builder.Services.AddAzureOpenAIChatCompletion("deployment-name"),
+ _ => builder.Services
+ };
+
+ // Assert
+ var chatCompletionService = builder.Build().GetRequiredService();
+ Assert.True(chatCompletionService is AzureOpenAIChatCompletionService);
+
+ var textGenerationService = builder.Build().GetRequiredService();
+ Assert.True(textGenerationService is AzureOpenAIChatCompletionService);
+ }
+
+ #endregion
+
+ #region Text embeddings
+
+ [Theory]
+ [InlineData(InitializationType.ApiKey)]
+ [InlineData(InitializationType.TokenCredential)]
+ [InlineData(InitializationType.ClientInline)]
+ [InlineData(InitializationType.ClientInServiceProvider)]
+ public void ServiceCollectionAddAzureOpenAITextEmbeddingGenerationAddsValidService(InitializationType type)
+ {
+ // Arrange
+ var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken());
+ var client = new AzureOpenAIClient(new Uri("http://localhost"), "key");
+ var builder = Kernel.CreateBuilder();
+
+ builder.Services.AddSingleton(client);
+
+ // Act
+ IServiceCollection collection = type switch
+ {
+ InitializationType.ApiKey => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", "api-key"),
+ InitializationType.TokenCredential => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", "https://endpoint", credentials),
+ InitializationType.ClientInline => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name", client),
+ InitializationType.ClientInServiceProvider => builder.Services.AddAzureOpenAITextEmbeddingGeneration("deployment-name"),
+ _ => builder.Services
+ };
+
+ // Assert
+ var service = builder.Build().GetRequiredService();
+
+ Assert.NotNull(service);
+ Assert.True(service is AzureOpenAITextEmbeddingGenerationService);
+ }
+
+ #endregion
+
+ #region Text to audio
+
+ [Fact]
+ public void ServiceCollectionAddAzureOpenAITextToAudioAddsValidService()
+ {
+ // Arrange
+ var sut = new ServiceCollection();
+
+ // Act
+ var service = sut.AddAzureOpenAITextToAudio("deployment-name", "https://endpoint", "api-key")
+ .BuildServiceProvider()
+ .GetRequiredService();
+
+ // Assert
+ Assert.IsType(service);
+ }
+
+ #endregion
+
+ #region Text to image
+
+ [Theory]
+ [InlineData(InitializationType.ApiKey)]
+ [InlineData(InitializationType.TokenCredential)]
+ [InlineData(InitializationType.ClientInline)]
+ [InlineData(InitializationType.ClientInServiceProvider)]
+ public void ServiceCollectionExtensionsAddAzureOpenAITextToImageService(InitializationType type)
+ {
+ // Arrange
+ var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken());
+ var client = new AzureOpenAIClient(new Uri("http://localhost"), "key");
+ var builder = Kernel.CreateBuilder();
+
+ builder.Services.AddSingleton(client);
+
+ // Act
+ IServiceCollection collection = type switch
+ {
+ InitializationType.ApiKey => builder.Services.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", "api-key"),
+ InitializationType.TokenCredential => builder.Services.AddAzureOpenAITextToImage("deployment-name", "https://endpoint", credentials),
+ InitializationType.ClientInline => builder.Services.AddAzureOpenAITextToImage("deployment-name", client),
+ InitializationType.ClientInServiceProvider => builder.Services.AddAzureOpenAITextToImage("deployment-name"),
+ _ => builder.Services
+ };
+
+ // Assert
+ var service = builder.Build().GetRequiredService();
+
+ Assert.True(service is AzureOpenAITextToImageService);
+ }
+
+ #endregion
+
+ #region Audio to text
+
+ [Theory]
+ [InlineData(InitializationType.ApiKey)]
+ [InlineData(InitializationType.TokenCredential)]
+ [InlineData(InitializationType.ClientInline)]
+ [InlineData(InitializationType.ClientInServiceProvider)]
+ public void ServiceCollectionAddAzureOpenAIAudioToTextAddsValidService(InitializationType type)
+ {
+ // Arrange
+ var credentials = DelegatedTokenCredential.Create((_, _) => new AccessToken());
+ var client = new AzureOpenAIClient(new Uri("https://endpoint"), "key");
+ var builder = Kernel.CreateBuilder();
+
+ builder.Services.AddSingleton(client);
+
+ // Act
+ IServiceCollection collection = type switch
+ {
+ InitializationType.ApiKey => builder.Services.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", "api-key"),
+ InitializationType.TokenCredential => builder.Services.AddAzureOpenAIAudioToText("deployment-name", "https://endpoint", credentials),
+ InitializationType.ClientInline => builder.Services.AddAzureOpenAIAudioToText("deployment-name", client),
+ InitializationType.ClientInServiceProvider => builder.Services.AddAzureOpenAIAudioToText("deployment-name"),
+ _ => builder.Services
+ };
+
+ // Assert
+ var service = builder.Build().GetRequiredService();
+
+ Assert.True(service is AzureOpenAIAudioToTextService);
+ }
+
+ #endregion
+
+ public enum InitializationType
+ {
+ ApiKey,
+ TokenCredential,
+ ClientInline,
+ ClientInServiceProvider,
+ ClientEndpoint,
+ }
+}
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/AzureOpenAIAudioToTextServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIAudioToTextServiceTests.cs
similarity index 56%
rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/AzureOpenAIAudioToTextServiceTests.cs
rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIAudioToTextServiceTests.cs
index 6100c434c878..a7f2f6b5a83d 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/AudioToText/AzureOpenAIAudioToTextServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIAudioToTextServiceTests.cs
@@ -2,16 +2,18 @@
using System;
using System.Net.Http;
+using System.Text;
using System.Threading.Tasks;
using Azure.AI.OpenAI;
using Azure.Core;
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Microsoft.SemanticKernel.Services;
using Moq;
-using Xunit;
-namespace SemanticKernel.Connectors.UnitTests.OpenAI.AudioToText;
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Services;
/// <summary>
/// Unit tests for <see cref="AzureOpenAIAudioToTextService"/> class.
/// </summary>
@@ -36,12 +38,12 @@ public void ConstructorWithApiKeyWorksCorrectly(bool includeLoggerFactory)
{
// Arrange & Act
var service = includeLoggerFactory ?
- new AzureOpenAIAudioToTextService("deployment-name", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) :
- new AzureOpenAIAudioToTextService("deployment-name", "https://endpoint", "api-key", "model-id");
+ new AzureOpenAIAudioToTextService("deployment", "https://endpoint", "api-key", "model-id", loggerFactory: this._mockLoggerFactory.Object) :
+ new AzureOpenAIAudioToTextService("deployment", "https://endpoint", "api-key", "model-id");
// Assert
- Assert.NotNull(service);
- Assert.Equal("model-id", service.Attributes["ModelId"]);
+ Assert.Equal("model-id", service.Attributes[AIServiceExtensions.ModelIdKey]);
+ Assert.Equal("deployment", service.Attributes[AzureClientCore.DeploymentNameKey]);
}
[Theory]
@@ -56,8 +58,8 @@ public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFacto
new AzureOpenAIAudioToTextService("deployment", "https://endpoint", credentials, "model-id");
// Assert
- Assert.NotNull(service);
- Assert.Equal("model-id", service.Attributes["ModelId"]);
+ Assert.Equal("model-id", service.Attributes[AIServiceExtensions.ModelIdKey]);
+ Assert.Equal("deployment", service.Attributes[AzureClientCore.DeploymentNameKey]);
}
[Theory]
@@ -66,14 +68,26 @@ public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFacto
public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory)
{
// Arrange & Act
- var client = new OpenAIClient("key");
+ var client = new AzureOpenAIClient(new Uri("http://host"), "key");
var service = includeLoggerFactory ?
new AzureOpenAIAudioToTextService("deployment", client, "model-id", loggerFactory: this._mockLoggerFactory.Object) :
new AzureOpenAIAudioToTextService("deployment", client, "model-id");
// Assert
- Assert.NotNull(service);
- Assert.Equal("model-id", service.Attributes["ModelId"]);
+ Assert.Equal("model-id", service.Attributes[AIServiceExtensions.ModelIdKey]);
+ Assert.Equal("deployment", service.Attributes[AzureClientCore.DeploymentNameKey]);
+ }
+
+ [Fact]
+ public void ItThrowsIfDeploymentNameIsNotProvided()
+ {
+ // Act & Assert
+ Assert.Throws<ArgumentException>(() => new AzureOpenAIAudioToTextService(" ", "http://host", "apikey"));
+ Assert.Throws<ArgumentException>(() => new AzureOpenAIAudioToTextService(" ", azureOpenAIClient: new(new Uri("http://host"), "apikey")));
+ Assert.Throws<ArgumentException>(() => new AzureOpenAIAudioToTextService("", "http://host", "apikey"));
+ Assert.Throws<ArgumentException>(() => new AzureOpenAIAudioToTextService("", azureOpenAIClient: new(new Uri("http://host"), "apikey")));
+ Assert.Throws<ArgumentNullException>(() => new AzureOpenAIAudioToTextService(null!, "http://host", "apikey"));
+ Assert.Throws<ArgumentNullException>(() => new AzureOpenAIAudioToTextService(null!, azureOpenAIClient: new(new Uri("http://host"), "apikey")));
}
[Theory]
@@ -81,7 +95,7 @@ public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory)
public async Task GetTextContentWithInvalidSettingsThrowsExceptionAsync(OpenAIAudioToTextExecutionSettings? settings, Type expectedExceptionType)
{
// Arrange
- var service = new AzureOpenAIAudioToTextService("deployment-name", "https://endpoint", "api-key", "model-id", this._httpClient);
+ var service = new AzureOpenAIAudioToTextService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
{
Content = new StringContent("Test audio-to-text response")
@@ -95,6 +109,34 @@ public async Task GetTextContentWithInvalidSettingsThrowsExceptionAsync(OpenAIAu
Assert.IsType(expectedExceptionType, exception);
}
+ [Theory]
+ [InlineData("verbose_json")]
+ [InlineData("json")]
+ [InlineData("vtt")]
+ [InlineData("srt")]
+ public async Task ItRespectResultFormatExecutionSettingAsync(string format)
+ {
+ // Arrange
+ var service = new AzureOpenAIAudioToTextService("deployment", "https://endpoint", "api-key", httpClient: this._httpClient);
+ this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+ {
+ Content = new StringContent("Test audio-to-text response")
+ };
+
+ // Act
+ var settings = new OpenAIAudioToTextExecutionSettings("file.mp3") { ResponseFormat = format };
+ var result = await service.GetTextContentsAsync(new AudioContent(new BinaryData("data"), mimeType: null), settings);
+
+ // Assert
+ Assert.NotNull(this._messageHandlerStub.RequestContent);
+ Assert.NotNull(result);
+
+ var multiPartData = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContent!);
+ var multiPartBreak = multiPartData.Substring(0, multiPartData.IndexOf("\r\n", StringComparison.OrdinalIgnoreCase));
+
+ Assert.Contains($"{format}\r\n{multiPartBreak}", multiPartData);
+ }
+
[Fact]
public async Task GetTextContentByDefaultWorksCorrectlyAsync()
{
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs
similarity index 65%
rename from dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs
rename to dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs
index 22be8458c2cc..9302b75c39bf 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Services/AzureOpenAIChatCompletionServiceTests.cs
@@ -10,16 +10,18 @@
using System.Text.Json;
using System.Threading.Tasks;
using Azure.AI.OpenAI;
+using Azure.AI.OpenAI.Chat;
using Azure.Core;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Moq;
-using Xunit;
+using OpenAI.Chat;
-namespace SemanticKernel.Connectors.UnitTests.OpenAI.ChatCompletion;
+namespace SemanticKernel.Connectors.AzureOpenAI.UnitTests.Services;
/// <summary>
/// Unit tests for <see cref="AzureOpenAIChatCompletionService"/>
/// </summary>
@@ -80,7 +82,7 @@ public void ConstructorWithTokenCredentialWorksCorrectly(bool includeLoggerFacto
public void ConstructorWithOpenAIClientWorksCorrectly(bool includeLoggerFactory)
{
// Arrange & Act
- var client = new OpenAIClient("key");
+ var client = new AzureOpenAIClient(new Uri("http://host"), "key");
var service = includeLoggerFactory ?
new AzureOpenAIChatCompletionService("deployment", client, "model-id", loggerFactory: this._mockLoggerFactory.Object) :
new AzureOpenAIChatCompletionService("deployment", client, "model-id");
@@ -97,7 +99,7 @@ public async Task GetTextContentsWorksCorrectlyAsync()
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
{
- Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
});
// Act
@@ -107,73 +109,36 @@ public async Task GetTextContentsWorksCorrectlyAsync()
Assert.True(result.Count > 0);
Assert.Equal("Test chat response", result[0].Text);
- var usage = result[0].Metadata?["Usage"] as CompletionsUsage;
+ var usage = result[0].Metadata?["Usage"] as ChatTokenUsage;
Assert.NotNull(usage);
- Assert.Equal(55, usage.PromptTokens);
- Assert.Equal(100, usage.CompletionTokens);
+ Assert.Equal(55, usage.InputTokens);
+ Assert.Equal(100, usage.OutputTokens);
Assert.Equal(155, usage.TotalTokens);
}
- [Fact]
- public async Task GetChatMessageContentsWithEmptyChoicesThrowsExceptionAsync()
- {
- // Arrange
- var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
- this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
- {
- Content = new StringContent("{\"id\":\"response-id\",\"object\":\"chat.completion\",\"created\":1704208954,\"model\":\"gpt-4\",\"choices\":[],\"usage\":{\"prompt_tokens\":55,\"completion_tokens\":100,\"total_tokens\":155},\"system_fingerprint\":null}")
- });
-
- // Act & Assert
- var exception = await Assert.ThrowsAsync<KernelException>(() => service.GetChatMessageContentsAsync([]));
-
- Assert.Equal("Chat completions not found", exception.Message);
- }
-
- [Theory]
- [InlineData(0)]
- [InlineData(129)]
- public async Task GetChatMessageContentsWithInvalidResultsPerPromptValueThrowsExceptionAsync(int resultsPerPrompt)
- {
- // Arrange
- var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
- var settings = new OpenAIPromptExecutionSettings { ResultsPerPrompt = resultsPerPrompt };
-
- // Act & Assert
- var exception = await Assert.ThrowsAsync<ArgumentOutOfRangeException>(() => service.GetChatMessageContentsAsync([], settings));
-
- Assert.Contains("The value must be in range between", exception.Message, StringComparison.OrdinalIgnoreCase);
- }
-
[Fact]
public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync()
{
// Arrange
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
- var settings = new OpenAIPromptExecutionSettings()
+ var settings = new AzureOpenAIPromptExecutionSettings()
{
MaxTokens = 123,
Temperature = 0.6,
TopP = 0.5,
FrequencyPenalty = 1.6,
PresencePenalty = 1.2,
- ResultsPerPrompt = 5,
Seed = 567,
TokenSelectionBiases = new Dictionary<int, int> { { 2, 3 } },
StopSequences = ["stop_sequence"],
Logprobs = true,
TopLogprobs = 5,
- AzureChatExtensionsOptions = new AzureChatExtensionsOptions
+ AzureChatDataSource = new AzureSearchChatDataSource()
{
- Extensions =
- {
- new AzureSearchChatExtensionConfiguration
- {
- SearchEndpoint = new Uri("http://test-search-endpoint"),
- IndexName = "test-index-name"
- }
- }
+ Endpoint = new Uri("http://test-search-endpoint"),
+ IndexName = "test-index-name",
+ Authentication = DataSourceAuthentication.FromApiKey("api-key"),
}
};
@@ -185,7 +150,7 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync()
this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
{
- Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
});
// Act
@@ -227,7 +192,6 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync()
Assert.Equal(0.5, content.GetProperty("top_p").GetDouble());
Assert.Equal(1.6, content.GetProperty("frequency_penalty").GetDouble());
Assert.Equal(1.2, content.GetProperty("presence_penalty").GetDouble());
- Assert.Equal(5, content.GetProperty("n").GetInt32());
Assert.Equal(567, content.GetProperty("seed").GetInt32());
Assert.Equal(3, content.GetProperty("logit_bias").GetProperty("2").GetInt32());
Assert.Equal("stop_sequence", content.GetProperty("stop")[0].GetString());
@@ -249,18 +213,18 @@ public async Task GetChatMessageContentsHandlesResponseFormatCorrectlyAsync(obje
{
// Arrange
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
- var settings = new OpenAIPromptExecutionSettings
+ var settings = new AzureOpenAIPromptExecutionSettings
{
ResponseFormat = responseFormat
};
this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
{
- Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
});
// Act
- var result = await service.GetChatMessageContentsAsync([], settings);
+ var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings);
// Assert
var requestContent = this._messageHandlerStub.RequestContents[0];
@@ -279,28 +243,28 @@ public async Task GetChatMessageContentsWorksCorrectlyAsync(ToolCallBehavior beh
// Arrange
var kernel = Kernel.CreateBuilder().Build();
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
- var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = behavior };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = behavior };
this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
{
- Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
});
// Act
- var result = await service.GetChatMessageContentsAsync([], settings, kernel);
+ var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings, kernel);
// Assert
Assert.True(result.Count > 0);
Assert.Equal("Test chat response", result[0].Content);
- var usage = result[0].Metadata?["Usage"] as CompletionsUsage;
+ var usage = result[0].Metadata?["Usage"] as ChatTokenUsage;
Assert.NotNull(usage);
- Assert.Equal(55, usage.PromptTokens);
- Assert.Equal(100, usage.CompletionTokens);
+ Assert.Equal(55, usage.InputTokens);
+ Assert.Equal(100, usage.OutputTokens);
Assert.Equal(155, usage.TotalTokens);
- Assert.Equal("stop", result[0].Metadata?["FinishReason"]);
+ Assert.Equal("Stop", result[0].Metadata?["FinishReason"]);
}
[Fact]
@@ -325,15 +289,15 @@ public async Task GetChatMessageContentsWithFunctionCallAsync()
kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]));
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
- var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
- using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json")) };
- using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) };
+ using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json")) };
+ using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) };
this._messageHandlerStub.ResponsesToReturn = [response1, response2];
// Act
- var result = await service.GetChatMessageContentsAsync([], settings, kernel);
+ var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings, kernel);
// Assert
Assert.True(result.Count > 0);
@@ -361,19 +325,19 @@ public async Task GetChatMessageContentsWithFunctionCallMaximumAutoInvokeAttempt
kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]));
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
- var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
var responses = new List<HttpResponseMessage>();
for (var i = 0; i < ModelResponsesCount; i++)
{
- responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) });
+ responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) });
}
this._messageHandlerStub.ResponsesToReturn = responses;
// Act
- var result = await service.GetChatMessageContentsAsync([], settings, kernel);
+ var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings, kernel);
// Assert
Assert.Equal(DefaultMaximumAutoInvokeAttempts, functionCallCount);
@@ -398,15 +362,15 @@ public async Task GetChatMessageContentsWithRequiredFunctionCallAsync()
kernel.Plugins.Add(plugin);
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
- var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) };
- using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) };
- using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) };
+ using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_single_function_call_test_response.json")) };
+ using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json")) };
this._messageHandlerStub.ResponsesToReturn = [response1, response2];
// Act
- var result = await service.GetChatMessageContentsAsync([], settings, kernel);
+ var result = await service.GetChatMessageContentsAsync(new ChatHistory("System message"), settings, kernel);
// Assert
Assert.Equal(1, functionCallCount);
@@ -434,7 +398,7 @@ public async Task GetStreamingTextContentsWorksCorrectlyAsync()
{
// Arrange
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
- using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")));
+ using var stream = new MemoryStream(Encoding.UTF8.GetBytes(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")));
this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
{
@@ -448,7 +412,67 @@ public async Task GetStreamingTextContentsWorksCorrectlyAsync()
Assert.Equal("Test chat streaming response", enumerator.Current.Text);
await enumerator.MoveNextAsync();
- Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]);
+ Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]);
+ }
+
+ [Fact]
+ public async Task GetStreamingChatContentsWithAsynchronousFilterWorksCorrectlyAsync()
+ {
+ // Arrange
+ var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+ using var stream = new MemoryStream(Encoding.UTF8.GetBytes(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_async_filter_response.txt")));
+
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
+ {
+ Content = new StreamContent(stream)
+ });
+
+ // Act & Assert
+ var enumerator = service.GetStreamingChatMessageContentsAsync("Prompt").GetAsyncEnumerator();
+
+#pragma warning disable AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+ await enumerator.MoveNextAsync();
+ var message = enumerator.Current;
+
+ Assert.IsType<StreamingChatCompletionUpdate>(message.InnerContent);
+ var update = (StreamingChatCompletionUpdate)message.InnerContent;
+ var promptResults = update.GetContentFilterResultForPrompt();
+ Assert.Equal(ContentFilterSeverity.Safe, promptResults.Hate.Severity);
+ Assert.Equal(ContentFilterSeverity.Safe, promptResults.Sexual.Severity);
+ Assert.Equal(ContentFilterSeverity.Safe, promptResults.Violence.Severity);
+ Assert.Equal(ContentFilterSeverity.Safe, promptResults.SelfHarm.Severity);
+ Assert.False(promptResults.Jailbreak.Detected);
+
+ await enumerator.MoveNextAsync();
+ message = enumerator.Current;
+
+ await enumerator.MoveNextAsync();
+ message = enumerator.Current;
+
+ await enumerator.MoveNextAsync();
+ message = enumerator.Current;
+
+ await enumerator.MoveNextAsync();
+ message = enumerator.Current;
+
+ Assert.IsType<StreamingChatCompletionUpdate>(message.InnerContent);
+ update = (StreamingChatCompletionUpdate)message.InnerContent;
+
+ var filterResults = update.GetContentFilterResultForResponse();
+ Assert.Equal(ContentFilterSeverity.Safe, filterResults.Hate.Severity);
+ Assert.Equal(ContentFilterSeverity.Safe, filterResults.Sexual.Severity);
+ Assert.Equal(ContentFilterSeverity.Safe, filterResults.SelfHarm.Severity);
+ Assert.Equal(ContentFilterSeverity.Safe, filterResults.Violence.Severity);
+
+ await enumerator.MoveNextAsync();
+ message = enumerator.Current;
+
+ Assert.IsType<StreamingChatCompletionUpdate>(message.InnerContent);
+ update = (StreamingChatCompletionUpdate)message.InnerContent;
+ filterResults = update.GetContentFilterResultForResponse();
+ Assert.False(filterResults.ProtectedMaterialCode.Detected);
+ Assert.False(filterResults.ProtectedMaterialText.Detected);
+#pragma warning restore AOAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
}
[Fact]
@@ -456,7 +480,7 @@ public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync()
{
// Arrange
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
- using var stream = new MemoryStream(Encoding.UTF8.GetBytes(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")));
+ using var stream = new MemoryStream(Encoding.UTF8.GetBytes(AzureOpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")));
this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
{
@@ -470,7 +494,7 @@ public async Task GetStreamingChatMessageContentsWorksCorrectlyAsync()
Assert.Equal("Test chat streaming response", enumerator.Current.Content);
await enumerator.MoveNextAsync();
- Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]);
+ Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]);
}
[Fact]
@@ -495,10 +519,10 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallAsync()
kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function1, function2]));
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
- var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
- using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_multiple_function_calls_test_response.txt")) };
- using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) };
+ using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_multiple_function_calls_test_response.txt") };
+ using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_test_response.txt") };
this._messageHandlerStub.ResponsesToReturn = [response1, response2];
@@ -507,10 +531,10 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallAsync()
await enumerator.MoveNextAsync();
Assert.Equal("Test chat streaming response", enumerator.Current.Content);
- Assert.Equal("tool_calls", enumerator.Current.Metadata?["FinishReason"]);
+ Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]);
await enumerator.MoveNextAsync();
- Assert.Equal("tool_calls", enumerator.Current.Metadata?["FinishReason"]);
+ Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]);
// Keep looping until the end of stream
while (await enumerator.MoveNextAsync())
@@ -539,13 +563,13 @@ public async Task GetStreamingChatMessageContentsWithFunctionCallMaximumAutoInvo
kernel.Plugins.Add(KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]));
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
- var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
var responses = new List<HttpResponseMessage>();
for (var i = 0; i < ModelResponsesCount; i++)
{
- responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_single_function_call_test_response.txt")) });
+ responses.Add(new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_single_function_call_test_response.txt") });
}
this._messageHandlerStub.ResponsesToReturn = responses;
@@ -578,10 +602,10 @@ public async Task GetStreamingChatMessageContentsWithRequiredFunctionCallAsync()
kernel.Plugins.Add(plugin);
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient, this._mockLoggerFactory.Object);
- var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.RequireFunction(openAIFunction, autoInvoke: true) };
- using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_single_function_call_test_response.txt")) };
- using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_streaming_test_response.txt")) };
+ using var response1 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_single_function_call_test_response.txt") };
+ using var response2 = new HttpResponseMessage(HttpStatusCode.OK) { Content = AzureOpenAITestHelper.GetTestResponseAsStream("chat_completion_streaming_test_response.txt") };
this._messageHandlerStub.ResponsesToReturn = [response1, response2];
@@ -591,7 +615,7 @@ public async Task GetStreamingChatMessageContentsWithRequiredFunctionCallAsync()
// Function Tool Call Streaming (One Chunk)
await enumerator.MoveNextAsync();
Assert.Equal("Test chat streaming response", enumerator.Current.Content);
- Assert.Equal("tool_calls", enumerator.Current.Metadata?["FinishReason"]);
+ Assert.Equal("ToolCalls", enumerator.Current.Metadata?["FinishReason"]);
// Chat Completion Streaming (1st Chunk)
await enumerator.MoveNextAsync();
@@ -599,7 +623,7 @@ public async Task GetStreamingChatMessageContentsWithRequiredFunctionCallAsync()
// Chat Completion Streaming (2nd Chunk)
await enumerator.MoveNextAsync();
- Assert.Equal("stop", enumerator.Current.Metadata?["FinishReason"]);
+ Assert.Equal("Stop", enumerator.Current.Metadata?["FinishReason"]);
Assert.Equal(1, functionCallCount);
@@ -629,11 +653,11 @@ public async Task GetChatMessageContentsUsesPromptAndSettingsCorrectlyAsync()
const string SystemMessage = "This is test system message";
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
- var settings = new OpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage };
this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
{
- Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
});
IKernelBuilder builder = Kernel.CreateBuilder();
@@ -673,11 +697,11 @@ public async Task GetChatMessageContentsWithChatMessageContentItemCollectionAndS
const string CollectionItemPrompt = "This is collection item prompt";
var service = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
- var settings = new OpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ChatSystemPrompt = SystemMessage };
this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
{
- Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
});
var chatHistory = new ChatHistory();
@@ -727,9 +751,9 @@ public async Task GetChatMessageContentsWithChatMessageContentItemCollectionAndS
public async Task FunctionCallsShouldBePropagatedToCallersViaChatMessageItemsOfTypeFunctionCallContentAsync()
{
// Arrange
- this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
{
- Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json"))
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_multiple_function_calls_test_response.json"))
});
var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
@@ -737,7 +761,7 @@ public async Task FunctionCallsShouldBePropagatedToCallersViaChatMessageItemsOfT
var chatHistory = new ChatHistory();
chatHistory.AddUserMessage("Fake prompt");
- var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
// Act
var result = await sut.GetChatMessageContentAsync(chatHistory, settings);
@@ -789,9 +813,9 @@ public async Task FunctionCallsShouldBePropagatedToCallersViaChatMessageItemsOfT
public async Task FunctionCallsShouldBeReturnedToLLMAsync()
{
// Arrange
- this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
{
- Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
});
var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
@@ -807,7 +831,7 @@ public async Task FunctionCallsShouldBeReturnedToLLMAsync()
new ChatMessageContent(AuthorRole.Assistant, items)
];
- var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
// Act
await sut.GetChatMessageContentAsync(chatHistory, settings);
@@ -847,9 +871,9 @@ public async Task FunctionCallsShouldBeReturnedToLLMAsync()
public async Task FunctionResultsCanBeProvidedToLLMAsOneResultPerChatMessageAsync()
{
// Arrange
- this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
{
- Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
});
var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
@@ -866,7 +890,7 @@ public async Task FunctionResultsCanBeProvidedToLLMAsOneResultPerChatMessageAsyn
])
};
- var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
// Act
await sut.GetChatMessageContentAsync(chatHistory, settings);
@@ -895,9 +919,9 @@ public async Task FunctionResultsCanBeProvidedToLLMAsOneResultPerChatMessageAsyn
public async Task FunctionResultsCanBeProvidedToLLMAsManyResultsInOneChatMessageAsync()
{
// Arrange
- this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+ this._messageHandlerStub.ResponsesToReturn.Add(new HttpResponseMessage(HttpStatusCode.OK)
{
- Content = new StringContent(OpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
+ Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
});
var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
@@ -911,7 +935,7 @@ public async Task FunctionResultsCanBeProvidedToLLMAsManyResultsInOneChatMessage
])
};
- var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
+ var settings = new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
// Act
await sut.GetChatMessageContentAsync(chatHistory, settings);
@@ -936,6 +960,150 @@ public async Task FunctionResultsCanBeProvidedToLLMAsManyResultsInOneChatMessage
Assert.Equal("2", assistantMessage2.GetProperty("tool_call_id").GetString());
}
+ [Fact]
+ public async Task GetChatMessageContentShouldSendMutatedChatHistoryToLLM()
+ {
+ // Arrange
+ static void MutateChatHistory(AutoFunctionInvocationContext context, Func<AutoFunctionInvocationContext, Task> next)
+ {
+ // Remove the function call messages from the chat history to reduce token count.
+ context.ChatHistory.RemoveRange(1, 2); // Remove the `Date` function call and function result messages.
+
+ next(context);
+ }
+
+ var kernel = new Kernel();
+ kernel.ImportPluginFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => "rainy", "GetCurrentWeather")]);
+ kernel.AutoFunctionInvocationFilters.Add(new AutoFunctionInvocationFilter(MutateChatHistory));
+
+ using var firstResponse = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_single_function_call_test_response.json")) };
+ this._messageHandlerStub.ResponsesToReturn.Add(firstResponse);
+
+ using var secondResponse = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_test_response.json")) };
+ this._messageHandlerStub.ResponsesToReturn.Add(secondResponse);
+
+ var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+
+ var chatHistory = new ChatHistory
+ {
+ new ChatMessageContent(AuthorRole.User, "What time is it?"),
+ new ChatMessageContent(AuthorRole.Assistant, [
+ new FunctionCallContent("Date", "TimePlugin", "2")
+ ]),
+ new ChatMessageContent(AuthorRole.Tool, [
+ new FunctionResultContent("Date", "TimePlugin", "2", "rainy")
+ ]),
+ new ChatMessageContent(AuthorRole.Assistant, "08/06/2024 00:00:00"),
+ new ChatMessageContent(AuthorRole.User, "Given the current time of day and weather, what is the likely color of the sky in Boston?")
+ };
+
+ // Act
+ await sut.GetChatMessageContentAsync(chatHistory, new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }, kernel);
+
+ // Assert
+ var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[1]!);
+ Assert.NotNull(actualRequestContent);
+
+ var optionsJson = JsonSerializer.Deserialize<JsonElement>(actualRequestContent);
+
+ var messages = optionsJson.GetProperty("messages");
+ Assert.Equal(5, messages.GetArrayLength());
+
+ var userFirstPrompt = messages[0];
+ Assert.Equal("user", userFirstPrompt.GetProperty("role").GetString());
+ Assert.Equal("What time is it?", userFirstPrompt.GetProperty("content").ToString());
+
+ var assistantFirstResponse = messages[1];
+ Assert.Equal("assistant", assistantFirstResponse.GetProperty("role").GetString());
+ Assert.Equal("08/06/2024 00:00:00", assistantFirstResponse.GetProperty("content").GetString());
+
+ var userSecondPrompt = messages[2];
+ Assert.Equal("user", userSecondPrompt.GetProperty("role").GetString());
+ Assert.Equal("Given the current time of day and weather, what is the likely color of the sky in Boston?", userSecondPrompt.GetProperty("content").ToString());
+
+ var assistantSecondResponse = messages[3];
+ Assert.Equal("assistant", assistantSecondResponse.GetProperty("role").GetString());
+ Assert.Equal("1", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("id").GetString());
+ Assert.Equal("MyPlugin-GetCurrentWeather", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("function").GetProperty("name").GetString());
+
+ var functionResult = messages[4];
+ Assert.Equal("tool", functionResult.GetProperty("role").GetString());
+ Assert.Equal("rainy", functionResult.GetProperty("content").GetString());
+ }
+
+ [Fact]
+ public async Task GetStreamingChatMessageContentsShouldSendMutatedChatHistoryToLLM()
+ {
+ // Arrange
+ static void MutateChatHistory(AutoFunctionInvocationContext context, Func<AutoFunctionInvocationContext, Task> next)
+ {
+ // Remove the function call messages from the chat history to reduce token count.
+ context.ChatHistory.RemoveRange(1, 2); // Remove the `Date` function call and function result messages.
+
+ next(context);
+ }
+
+ var kernel = new Kernel();
+ kernel.ImportPluginFromFunctions("MyPlugin", [KernelFunctionFactory.CreateFromMethod(() => "rainy", "GetCurrentWeather")]);
+ kernel.AutoFunctionInvocationFilters.Add(new AutoFunctionInvocationFilter(MutateChatHistory));
+
+ using var firstResponse = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_single_function_call_test_response.txt")) };
+ this._messageHandlerStub.ResponsesToReturn.Add(firstResponse);
+
+ using var secondResponse = new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(File.OpenRead("TestData/chat_completion_streaming_test_response.txt")) };
+ this._messageHandlerStub.ResponsesToReturn.Add(secondResponse);
+
+ var sut = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
+
+ var chatHistory = new ChatHistory
+ {
+ new ChatMessageContent(AuthorRole.User, "What time is it?"),
+ new ChatMessageContent(AuthorRole.Assistant, [
+ new FunctionCallContent("Date", "TimePlugin", "2")
+ ]),
+ new ChatMessageContent(AuthorRole.Tool, [
+ new FunctionResultContent("Date", "TimePlugin", "2", "rainy")
+ ]),
+ new ChatMessageContent(AuthorRole.Assistant, "08/06/2024 00:00:00"),
+ new ChatMessageContent(AuthorRole.User, "Given the current time of day and weather, what is the likely color of the sky in Boston?")
+ };
+
+ // Act
+ await foreach (var update in sut.GetStreamingChatMessageContentsAsync(chatHistory, new AzureOpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions }, kernel))
+ {
+ }
+
+ // Assert
+ var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[1]!);
+ Assert.NotNull(actualRequestContent);
+
+ var optionsJson = JsonSerializer.Deserialize<JsonElement>(actualRequestContent);
+
+ var messages = optionsJson.GetProperty("messages");
+ Assert.Equal(5, messages.GetArrayLength());
+
+ var userFirstPrompt = messages[0];
+ Assert.Equal("user", userFirstPrompt.GetProperty("role").GetString());
+ Assert.Equal("What time is it?", userFirstPrompt.GetProperty("content").ToString());
+
+ var assistantFirstResponse = messages[1];
+ Assert.Equal("assistant", assistantFirstResponse.GetProperty("role").GetString());
+ Assert.Equal("08/06/2024 00:00:00", assistantFirstResponse.GetProperty("content").GetString());
+
+ var userSecondPrompt = messages[2];
+ Assert.Equal("user", userSecondPrompt.GetProperty("role").GetString());
+ Assert.Equal("Given the current time of day and weather, what is the likely color of the sky in Boston?", userSecondPrompt.GetProperty("content").ToString());
+
+ var assistantSecondResponse = messages[3];
+ Assert.Equal("assistant", assistantSecondResponse.GetProperty("role").GetString());
+ Assert.Equal("1", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("id").GetString());
+ Assert.Equal("MyPlugin-GetCurrentWeather", assistantSecondResponse.GetProperty("tool_calls")[0].GetProperty("function").GetProperty("name").GetString());
+
+ var functionResult = messages[4];
+ Assert.Equal("tool", functionResult.GetProperty("role").GetString());
+ Assert.Equal("rainy", functionResult.GetProperty("content").GetString());
+ }
+
public void Dispose()
{
this._httpClient.Dispose();
@@ -950,10 +1118,27 @@ public void Dispose()
public static TheoryData