From 084e1ca1f5db906006001176d3ca0c884c1c9b18 Mon Sep 17 00:00:00 2001 From: markwallace-microsoft <127216156+markwallace-microsoft@users.noreply.github.com> Date: Wed, 2 Apr 2025 13:17:59 +0100 Subject: [PATCH 1/6] Add a BoolJsonConverter to handle bool values when converting from PromptExecutionSettings --- dotnet/SK-dotnet.sln | 1 + .../AmazonCommandExecutionSettings.cs | 1 + .../AmazonCommandRExecutionSettings.cs | 3 + .../OpenAIPromptExecutionSettingsTests.cs | 81 +++++++++++++++++++ .../GeminiPromptExecutionSettings.cs | 1 + .../HuggingFacePromptExecutionSettings.cs | 6 ++ .../MistralAIPromptExecutionSettings.cs | 1 + .../Connectors.Onnx/Connectors.Onnx.csproj | 1 + ...OnnxRuntimeGenAIPromptExecutionSettings.cs | 2 + .../Connectors.OpenAI.csproj | 2 +- .../Settings/OpenAIPromptExecutionSettings.cs | 4 + .../{PromptyTest.cs => PromptyTests.cs} | 48 ++++++++++- .../Functions.Prompty/AssemblyInfo.cs | 6 -- .../Functions.Prompty.csproj | 2 +- .../src/Text/BoolJsonConverter.cs | 61 ++++++++++++++ .../AI/PromptExecutionSettings.cs | 2 - 16 files changed, 210 insertions(+), 12 deletions(-) rename dotnet/src/Functions/Functions.Prompty.UnitTests/{PromptyTest.cs => PromptyTests.cs} (87%) delete mode 100644 dotnet/src/Functions/Functions.Prompty/AssemblyInfo.cs create mode 100644 dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index 16fa0c43cd0d..64c4649e44e7 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -224,6 +224,7 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Functions.OpenApi.Extension EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Text", "Text", "{EB2C141A-AE5F-4080-8790-13EB16323CEF}" ProjectSection(SolutionItems) = preProject + src\Connectors\Connectors.OpenAI\Text\BoolJsonConverter.cs = src\Connectors\Connectors.OpenAI\Text\BoolJsonConverter.cs src\InternalUtilities\src\Text\ExceptionJsonConverter.cs = src\InternalUtilities\src\Text\ExceptionJsonConverter.cs src\InternalUtilities\src\Text\JsonOptionsCache.cs = src\InternalUtilities\src\Text\JsonOptionsCache.cs src\InternalUtilities\src\Text\SseData.cs = src\InternalUtilities\src\Text\SseData.cs diff --git a/dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandExecutionSettings.cs index 58624e76b292..ad1cae276650 100644 --- a/dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandExecutionSettings.cs @@ -112,6 +112,7 @@ public string? ReturnLikelihoods /// (Required to support streaming) Specify true to return the response piece-by-piece in real-time and false to return the complete response after the process finishes. /// [JsonPropertyName("stream")] + [JsonConverter(typeof(BoolJsonConverter))] public bool? Stream { get => this._stream; diff --git a/dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandRExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandRExecutionSettings.cs index 082dd1034b0a..71fa790b5608 100644 --- a/dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandRExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandRExecutionSettings.cs @@ -63,6 +63,7 @@ public List? Documents /// Defaults to false. 
When true, the response will only contain a list of generated search queries, but no search will take place, and no reply from the model to the user's message will be generated. /// [JsonPropertyName("search_queries_only")] + [JsonConverter(typeof(BoolJsonConverter))] public bool? SearchQueriesOnly { get => this._searchQueriesOnly; @@ -203,6 +204,7 @@ public int? Seed /// Specify true to return the full prompt that was sent to the model. The default value is false. In the response, the prompt in the prompt field. /// [JsonPropertyName("return_prompt")] + [JsonConverter(typeof(BoolJsonConverter))] public bool? ReturnPrompt { get => this._returnPrompt; @@ -259,6 +261,7 @@ public List? StopSequences /// Specify true, to send the user's message to the model without any preprocessing, otherwise false. /// [JsonPropertyName("raw_prompting")] + [JsonConverter(typeof(BoolJsonConverter))] public bool? RawPrompting { get => this._rawPrompting; diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs index b4bfd8634808..72eb2f6fde2a 100644 --- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs @@ -64,6 +64,86 @@ public void ItRestoresOriginalFunctionChoiceBehavior() Assert.Equal(functionChoiceBehavior, result.FunctionChoiceBehavior); } + [Fact] + public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettings() + { + // Arrange + PromptExecutionSettings originalSettings = new() + { + ExtensionData = new Dictionary() + { + { "temperature", 0.7 }, + { "top_p", 0.7 }, + { "frequency_penalty", 0.7 }, + { "presence_penalty", 0.7 }, + { "stop_sequences", new string[] { "foo", "bar" } }, + { "chat_system_prompt", "chat system prompt" }, + { "token_selection_biases", new Dictionary() { { 1, 2 }, { 3, 4 } } }, + { "max_tokens", 128 }, + { "logprobs", true }, + { "seed", 123456 }, + { "top_logprobs", 5 }, + } + }; + + // Act + OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Fact] + public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettingsWithIncorrectTypes() + { + // Arrange + PromptExecutionSettings originalSettings = new() + { + ExtensionData = new Dictionary() + { + { "temperature", "0.7" }, + { "top_p", "0.7" }, + { "frequency_penalty", "0.7" }, + { "presence_penalty", "0.7" }, + { "stop_sequences", new List { "foo", "bar" } }, + { "chat_system_prompt", "chat system prompt" }, + { "token_selection_biases", new Dictionary() { { "1", "2" }, { "3", "4" } } }, + { "max_tokens", "128" }, + { "logprobs", "true" }, + { "seed", "123456" }, + { "top_logprobs", "5" }, + } + }; + + // Act + OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Theory] + [InlineData("")] + [InlineData("123")] + [InlineData("Foo")] + [InlineData(1)] + [InlineData(1.0)] + public void ItCannotCreateOpenAIPromptExecutionSettingsWithInvalidBoolValues(object value) + { + // Arrange + PromptExecutionSettings originalSettings = new() + { + ExtensionData = new Dictionary() + { + { "logprobs", value } + } + }; + + // 
Act & Assert + Assert.Throws(() => OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings)); + } + + #region private private static void AssertExecutionSettings(OpenAIPromptExecutionSettings executionSettings) { Assert.NotNull(executionSettings); @@ -79,4 +159,5 @@ private static void AssertExecutionSettings(OpenAIPromptExecutionSettings execut Assert.Equal(true, executionSettings.Logprobs); Assert.Equal(5, executionSettings.TopLogprobs); } + #endregion } diff --git a/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs index daa8ea629a5e..5bedc63c0266 100644 --- a/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs @@ -189,6 +189,7 @@ public GeminiToolCallBehavior? ToolCallBehavior /// [JsonPropertyName("audio_timestamp")] [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + [JsonConverter(typeof(BoolJsonConverter))] public bool? AudioTimestamp { get => this._audioTimestamp; diff --git a/dotnet/src/Connectors/Connectors.HuggingFace/HuggingFacePromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.HuggingFace/HuggingFacePromptExecutionSettings.cs index 7c07ee1f8adb..dd0fd03742cf 100644 --- a/dotnet/src/Connectors/Connectors.HuggingFace/HuggingFacePromptExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.HuggingFace/HuggingFacePromptExecutionSettings.cs @@ -183,6 +183,7 @@ public bool UseCache /// This may not be supported by all models/inference API. /// [JsonPropertyName("wait_for_model")] + [JsonConverter(typeof(BoolJsonConverter))] public bool WaitForModel { get => this._waitForModel; @@ -233,6 +234,7 @@ public float? PresencePenalty /// output token returned in the content of message. /// [JsonPropertyName("logprobs")] + [JsonConverter(typeof(BoolJsonConverter))] public bool? LogProbs { get => this._logProbs; @@ -294,6 +296,7 @@ public int? TopLogProbs /// (Default: True). Bool. If set to False, the return results will not contain the original query making it easier for prompting. /// [JsonPropertyName("return_full_text")] + [JsonConverter(typeof(BoolJsonConverter))] public bool? ReturnFullText { get => this._returnFullText; @@ -309,6 +312,7 @@ public bool? ReturnFullText /// (Optional: True). Bool. Whether or not to use sampling, use greedy decoding otherwise. /// [JsonPropertyName("do_sample")] + [JsonConverter(typeof(BoolJsonConverter))] public bool? DoSample { get => this._doSample; @@ -323,6 +327,8 @@ public bool? DoSample /// /// Show details of the generation. Including usage. /// + [JsonPropertyName("details")] + [JsonConverter(typeof(BoolJsonConverter))] public bool? Details { get => this._details; diff --git a/dotnet/src/Connectors/Connectors.MistralAI/MistralAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.MistralAI/MistralAIPromptExecutionSettings.cs index 11337705520e..eb2b6760a11a 100644 --- a/dotnet/src/Connectors/Connectors.MistralAI/MistralAIPromptExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.MistralAI/MistralAIPromptExecutionSettings.cs @@ -77,6 +77,7 @@ public int? MaxTokens /// Whether to inject a safety prompt before all conversations. 
/// [JsonPropertyName("safe_prompt")] + [JsonConverter(typeof(BoolJsonConverter))] public bool SafePrompt { get => this._safePrompt; diff --git a/dotnet/src/Connectors/Connectors.Onnx/Connectors.Onnx.csproj b/dotnet/src/Connectors/Connectors.Onnx/Connectors.Onnx.csproj index c988ac39a7ad..3ba5ad5ff071 100644 --- a/dotnet/src/Connectors/Connectors.Onnx/Connectors.Onnx.csproj +++ b/dotnet/src/Connectors/Connectors.Onnx/Connectors.Onnx.csproj @@ -23,6 +23,7 @@ + diff --git a/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIPromptExecutionSettings.cs index e8c7f058fd24..d463f1115196 100644 --- a/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIPromptExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIPromptExecutionSettings.cs @@ -91,6 +91,7 @@ public static OnnxRuntimeGenAIPromptExecutionSettings FromExecutionSettings(Prom /// The past/present kv tensors are shared and allocated once to max_length (cuda only) /// [JsonPropertyName("past_present_share_buffer")] + [JsonConverter(typeof(BoolJsonConverter))] public bool? PastPresentShareBuffer { get; set; } /// @@ -145,5 +146,6 @@ public static OnnxRuntimeGenAIPromptExecutionSettings FromExecutionSettings(Prom /// Do random sampling /// [JsonPropertyName("do_sample")] + [JsonConverter(typeof(BoolJsonConverter))] public bool? DoSample { get; set; } } diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj b/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj index 64a0e72bde6d..ea6cd02fa983 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj +++ b/dotnet/src/Connectors/Connectors.OpenAI/Connectors.OpenAI.csproj @@ -41,7 +41,7 @@ - + diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs index 12b44717113a..efedaaeaad48 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs @@ -284,6 +284,8 @@ public ToolCallBehavior? ToolCallBehavior /// /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse /// + [JsonPropertyName("user")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] public string? User { get => this._user; @@ -302,6 +304,7 @@ public string? User [Experimental("SKEXP0010")] [JsonPropertyName("logprobs")] [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + [JsonConverter(typeof(BoolJsonConverter))] public bool? Logprobs { get => this._logprobs; @@ -353,6 +356,7 @@ public IDictionary? Metadata [Experimental("SKEXP0010")] [JsonPropertyName("store")] [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + [JsonConverter(typeof(BoolJsonConverter))] public bool? 
Store { get => this._store; diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTests.cs similarity index 87% rename from dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs rename to dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTests.cs index a019f6bbfba9..82d991d07c61 100644 --- a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTest.cs +++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTests.cs @@ -16,7 +16,7 @@ namespace SemanticKernel.Functions.Prompty.UnitTests; -public sealed class PromptyTest +public sealed class PromptyTests { [Fact] public void ChatPromptyTest() @@ -128,7 +128,7 @@ public void ItShouldCreateFunctionFromPromptYamlWithEmbeddedFileProvider() // Arrange Kernel kernel = new(); var chatPromptyPath = Path.Combine("TestData", "chat.prompty"); - ManifestEmbeddedFileProvider manifestEmbeddedProvider = new(typeof(PromptyTest).Assembly); + ManifestEmbeddedFileProvider manifestEmbeddedProvider = new(typeof(PromptyTests).Assembly); // Act var kernelFunction = kernel.CreateFunctionFromPromptyFile(chatPromptyPath, @@ -357,6 +357,50 @@ public void ItCreatesInputVariablesOnlyWhenNoneAreExplicitlySet() Assert.Equal(expectedVariables, kernelFunction.Metadata.Parameters.Select(p => p.Name)); } + [Fact] + public void ItShouldLoadExecutionSettings() + { + // Arrange + const string Prompty = """ + --- + name: SomePrompt + description: This is the description. + model: + api: chat + configuration: + type: azure_openai_beta + parameters: + logprobs: !!bool true + top_logprobs: 2 + top_p: 1.0 + user: Bob + stop_sequences: + - END + - COMPLETE + token_selection_biases: + 1: 2 + 3: 4 + --- + Abc---def + """; + + // Act + var kernelFunction = new Kernel().CreateFunctionFromPrompty(Prompty); + PromptExecutionSettings executionSettings = kernelFunction.ExecutionSettings!["default"]; + + // Assert + Assert.NotNull(kernelFunction); + Assert.NotNull(executionSettings); + var openaiExecutionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(executionSettings); + Assert.NotNull(openaiExecutionSettings); + Assert.True(openaiExecutionSettings.Logprobs); + Assert.Equal(2, openaiExecutionSettings.TopLogprobs); + Assert.Equal(1.0, openaiExecutionSettings.TopP); + Assert.Equal("Bob", openaiExecutionSettings.User); + Assert.Equal(["END", "COMPLETE"], openaiExecutionSettings.StopSequences); + Assert.Equal(new Dictionary() { { 1, 2 }, { 3, 4 } }, openaiExecutionSettings.TokenSelectionBiases); + } + private sealed class EchoTextGenerationService : ITextGenerationService { public IReadOnlyDictionary Attributes { get; } = new Dictionary(); diff --git a/dotnet/src/Functions/Functions.Prompty/AssemblyInfo.cs b/dotnet/src/Functions/Functions.Prompty/AssemblyInfo.cs deleted file mode 100644 index a7534ccf9f38..000000000000 --- a/dotnet/src/Functions/Functions.Prompty/AssemblyInfo.cs +++ /dev/null @@ -1,6 +0,0 @@ -// Copyright (c) Microsoft. All rights reserved. - -using System.Diagnostics.CodeAnalysis; - -// This assembly is currently experimental. 
-[assembly: Experimental("SKEXP0040")] diff --git a/dotnet/src/Functions/Functions.Prompty/Functions.Prompty.csproj b/dotnet/src/Functions/Functions.Prompty/Functions.Prompty.csproj index 44ffa76868dc..3b7fb3d4839c 100644 --- a/dotnet/src/Functions/Functions.Prompty/Functions.Prompty.csproj +++ b/dotnet/src/Functions/Functions.Prompty/Functions.Prompty.csproj @@ -4,7 +4,7 @@ Microsoft.SemanticKernel.Prompty $(AssemblyName) net8.0;netstandard2.0 - alpha + beta $(NoWarn);CA1812 diff --git a/dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs b/dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs new file mode 100644 index 000000000000..aefd52e471a8 --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs @@ -0,0 +1,61 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Text; + +/// +/// Deserializes a bool from a string. This is useful when deserializing a instance that contains bool properties. +/// Serializing a instance without this converter will throw a 'System.Text.Json.JsonException : The JSON value could not be converted to System.Nullable' +/// if there are any bool properties. +/// +internal sealed class BoolJsonConverter : JsonConverter +{ + /// + public override bool? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + if (reader.TokenType == JsonTokenType.String) + { + string? value = reader.GetString(); + if (value is null) + { + return null; + } + if (value.Equals("true", StringComparison.OrdinalIgnoreCase)) + { + return true; + } + else if (value.Equals("false", StringComparison.OrdinalIgnoreCase)) + { + return false; + } + + throw new ArgumentException($"Value '{value}' can be parsed as a boolean value"); + } + else if (reader.TokenType == JsonTokenType.True) + { + return true; + } + else if (reader.TokenType == JsonTokenType.False) + { + return false; + } + + throw new ArgumentException($"Invalid token type found '{reader.TokenType}', expected a boolean value."); + } + + /// + public override void Write(Utf8JsonWriter writer, bool? value, JsonSerializerOptions options) + { + if (value is null) + { + writer.WriteNullValue(); + } + else + { + writer.WriteBooleanValue((bool)value); + } + } +} diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs index c42baa962d7e..b767c478b1bc 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs @@ -3,7 +3,6 @@ using System; using System.Collections.Generic; using System.Collections.ObjectModel; -using System.Diagnostics.CodeAnalysis; using System.Text.Json.Serialization; using Microsoft.SemanticKernel.ChatCompletion; using Microsoft.SemanticKernel.TextGeneration; @@ -36,7 +35,6 @@ public class PromptExecutionSettings /// When provided, this service identifier will be the key in a dictionary collection of execution settings for both and . /// If not provided the service identifier will be the default value in . /// - [Experimental("SKEXP0001")] [JsonPropertyName("service_id")] public string? 
ServiceId { From 44d2d750c328e89957a358025b6fe528e6d05cf0 Mon Sep 17 00:00:00 2001 From: markwallace-microsoft <127216156+markwallace-microsoft@users.noreply.github.com> Date: Wed, 2 Apr 2025 13:29:33 +0100 Subject: [PATCH 2/6] Add pragma to ignore CA1812 --- dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs b/dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs index aefd52e471a8..bba4a88b6304 100644 --- a/dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs +++ b/dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs @@ -6,6 +6,8 @@ namespace Microsoft.SemanticKernel.Text; +#pragma warning disable CA1812 // Instantiated via JsonConverterAttribute + /// /// Deserializes a bool from a string. This is useful when deserializing a instance that contains bool properties. /// Serializing a instance without this converter will throw a 'System.Text.Json.JsonException : The JSON value could not be converted to System.Nullable' From fabffc2569007e796ac60d39a1856d96309e8bc9 Mon Sep 17 00:00:00 2001 From: markwallace-microsoft <127216156+markwallace-microsoft@users.noreply.github.com> Date: Wed, 2 Apr 2025 15:17:05 +0100 Subject: [PATCH 3/6] Adding additional unit tests --- dotnet/SK-dotnet.sln | 1 + .../AmazonCommandExecutionSettings.cs | 2 +- .../AmazonCommandRExecutionSettings.cs | 6 +- .../OpenAIPromptExecutionSettingsTests.cs | 46 ++++++- .../GeminiPromptExecutionSettings.cs | 2 +- .../HuggingFacePromptExecutionSettings.cs | 8 +- .../Connectors.Onnx/Connectors.Onnx.csproj | 2 +- ...OnnxRuntimeGenAIPromptExecutionSettings.cs | 5 +- .../OpenAIPromptExecutionSettingsTests.cs | 127 ++++++++++++++++++ .../Settings/OpenAIPromptExecutionSettings.cs | 4 +- .../src/Text/BoolJsonConverter.cs | 17 +-- .../src/Text/OptionalBoolJsonConverter.cs | 63 +++++++++ 12 files changed, 251 insertions(+), 32 deletions(-) create mode 100644 dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index 64c4649e44e7..970a7ef951f9 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -227,6 +227,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Text", "Text", "{EB2C141A-A src\Connectors\Connectors.OpenAI\Text\BoolJsonConverter.cs = src\Connectors\Connectors.OpenAI\Text\BoolJsonConverter.cs src\InternalUtilities\src\Text\ExceptionJsonConverter.cs = src\InternalUtilities\src\Text\ExceptionJsonConverter.cs src\InternalUtilities\src\Text\JsonOptionsCache.cs = src\InternalUtilities\src\Text\JsonOptionsCache.cs + OptionalBoolJsonConverter.cs = OptionalBoolJsonConverter.cs src\InternalUtilities\src\Text\SseData.cs = src\InternalUtilities\src\Text\SseData.cs src\InternalUtilities\src\Text\SseJsonParser.cs = src\InternalUtilities\src\Text\SseJsonParser.cs src\InternalUtilities\src\Text\SseLine.cs = src\InternalUtilities\src\Text\SseLine.cs diff --git a/dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandExecutionSettings.cs index ad1cae276650..928183399785 100644 --- a/dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandExecutionSettings.cs @@ -112,7 +112,7 @@ public string? 
ReturnLikelihoods /// (Required to support streaming) Specify true to return the response piece-by-piece in real-time and false to return the complete response after the process finishes. /// [JsonPropertyName("stream")] - [JsonConverter(typeof(BoolJsonConverter))] + [JsonConverter(typeof(OptionalBoolJsonConverter))] public bool? Stream { get => this._stream; diff --git a/dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandRExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandRExecutionSettings.cs index 71fa790b5608..32a26d6603ae 100644 --- a/dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandRExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.Amazon/Bedrock/Settings/AmazonCommandRExecutionSettings.cs @@ -63,7 +63,7 @@ public List? Documents /// Defaults to false. When true, the response will only contain a list of generated search queries, but no search will take place, and no reply from the model to the user's message will be generated. /// [JsonPropertyName("search_queries_only")] - [JsonConverter(typeof(BoolJsonConverter))] + [JsonConverter(typeof(OptionalBoolJsonConverter))] public bool? SearchQueriesOnly { get => this._searchQueriesOnly; @@ -204,7 +204,7 @@ public int? Seed /// Specify true to return the full prompt that was sent to the model. The default value is false. In the response, the prompt in the prompt field. /// [JsonPropertyName("return_prompt")] - [JsonConverter(typeof(BoolJsonConverter))] + [JsonConverter(typeof(OptionalBoolJsonConverter))] public bool? ReturnPrompt { get => this._returnPrompt; @@ -261,7 +261,7 @@ public List? StopSequences /// Specify true, to send the user's message to the model without any preprocessing, otherwise false. /// [JsonPropertyName("raw_prompting")] - [JsonConverter(typeof(BoolJsonConverter))] + [JsonConverter(typeof(OptionalBoolJsonConverter))] public bool? 
RawPrompting { get => this._rawPrompting; diff --git a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs index 72eb2f6fde2a..b07b0751b4ff 100644 --- a/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs +++ b/dotnet/src/Connectors/Connectors.AzureOpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; +using System.Text.Json; using Azure.AI.OpenAI.Chat; using Microsoft.SemanticKernel; using Microsoft.SemanticKernel.Connectors.AzureOpenAI; @@ -65,7 +66,7 @@ public void ItRestoresOriginalFunctionChoiceBehavior() } [Fact] - public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettings() + public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromPromptExecutionSettings() { // Arrange PromptExecutionSettings originalSettings = new() @@ -87,14 +88,47 @@ public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettings( }; // Act - OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings); + AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Fact] + public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromJson() + { + // Arrange + var json = + """ + { + "temperature": 0.7, + "top_p": 0.7, + "frequency_penalty": 0.7, + "presence_penalty": 0.7, + "stop_sequences": [ "foo", "bar" ], + "chat_system_prompt": "chat system prompt", + "token_selection_biases": + { + "1": "2", + "3": "4" + }, + "max_tokens": 128, + "logprobs": true, + "seed": 123456, + "top_logprobs": 5 + } + """; + + // Act + var originalSettings = JsonSerializer.Deserialize(json); + OpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings); // Assert AssertExecutionSettings(executionSettings); } [Fact] - public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettingsWithIncorrectTypes() + public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromPromptExecutionSettingsWithIncorrectTypes() { // Arrange PromptExecutionSettings originalSettings = new() @@ -116,7 +150,7 @@ public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettingsW }; // Act - OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings); + AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings); // Assert AssertExecutionSettings(executionSettings); @@ -128,7 +162,7 @@ public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettingsW [InlineData("Foo")] [InlineData(1)] [InlineData(1.0)] - public void ItCannotCreateOpenAIPromptExecutionSettingsWithInvalidBoolValues(object value) + public void ItCannotCreateAzureOpenAIPromptExecutionSettingsWithInvalidBoolValues(object value) { // Arrange PromptExecutionSettings originalSettings = new() @@ -140,7 +174,7 @@ public void ItCannotCreateOpenAIPromptExecutionSettingsWithInvalidBoolValues(obj }; // Act & Assert - Assert.Throws(() => OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings)); + Assert.Throws(() => 
AzureOpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings)); } #region private diff --git a/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs index 5bedc63c0266..bcbbbd4f9e8c 100644 --- a/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.Google/GeminiPromptExecutionSettings.cs @@ -189,7 +189,7 @@ public GeminiToolCallBehavior? ToolCallBehavior /// [JsonPropertyName("audio_timestamp")] [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - [JsonConverter(typeof(BoolJsonConverter))] + [JsonConverter(typeof(OptionalBoolJsonConverter))] public bool? AudioTimestamp { get => this._audioTimestamp; diff --git a/dotnet/src/Connectors/Connectors.HuggingFace/HuggingFacePromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.HuggingFace/HuggingFacePromptExecutionSettings.cs index dd0fd03742cf..7767e8292246 100644 --- a/dotnet/src/Connectors/Connectors.HuggingFace/HuggingFacePromptExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.HuggingFace/HuggingFacePromptExecutionSettings.cs @@ -234,7 +234,7 @@ public float? PresencePenalty /// output token returned in the content of message. /// [JsonPropertyName("logprobs")] - [JsonConverter(typeof(BoolJsonConverter))] + [JsonConverter(typeof(OptionalBoolJsonConverter))] public bool? LogProbs { get => this._logProbs; @@ -296,7 +296,7 @@ public int? TopLogProbs /// (Default: True). Bool. If set to False, the return results will not contain the original query making it easier for prompting. /// [JsonPropertyName("return_full_text")] - [JsonConverter(typeof(BoolJsonConverter))] + [JsonConverter(typeof(OptionalBoolJsonConverter))] public bool? ReturnFullText { get => this._returnFullText; @@ -312,7 +312,7 @@ public bool? ReturnFullText /// (Optional: True). Bool. Whether or not to use sampling, use greedy decoding otherwise. /// [JsonPropertyName("do_sample")] - [JsonConverter(typeof(BoolJsonConverter))] + [JsonConverter(typeof(OptionalBoolJsonConverter))] public bool? DoSample { get => this._doSample; @@ -328,7 +328,7 @@ public bool? DoSample /// Show details of the generation. Including usage. /// [JsonPropertyName("details")] - [JsonConverter(typeof(BoolJsonConverter))] + [JsonConverter(typeof(OptionalBoolJsonConverter))] public bool? Details { get => this._details; diff --git a/dotnet/src/Connectors/Connectors.Onnx/Connectors.Onnx.csproj b/dotnet/src/Connectors/Connectors.Onnx/Connectors.Onnx.csproj index 3ba5ad5ff071..c7fca5dc7346 100644 --- a/dotnet/src/Connectors/Connectors.Onnx/Connectors.Onnx.csproj +++ b/dotnet/src/Connectors/Connectors.Onnx/Connectors.Onnx.csproj @@ -23,7 +23,7 @@ - + diff --git a/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIPromptExecutionSettings.cs index d463f1115196..aadc7a600a2f 100644 --- a/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIPromptExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.Onnx/OnnxRuntimeGenAIPromptExecutionSettings.cs @@ -91,7 +91,7 @@ public static OnnxRuntimeGenAIPromptExecutionSettings FromExecutionSettings(Prom /// The past/present kv tensors are shared and allocated once to max_length (cuda only) /// [JsonPropertyName("past_present_share_buffer")] - [JsonConverter(typeof(BoolJsonConverter))] + [JsonConverter(typeof(OptionalBoolJsonConverter))] public bool? 
PastPresentShareBuffer { get; set; } /// @@ -140,12 +140,13 @@ public static OnnxRuntimeGenAIPromptExecutionSettings FromExecutionSettings(Prom /// Allows the generation to stop early if all beam candidates reach the end token /// [JsonPropertyName("early_stopping")] + [JsonConverter(typeof(OptionalBoolJsonConverter))] public bool? EarlyStopping { get; set; } /// /// Do random sampling /// [JsonPropertyName("do_sample")] - [JsonConverter(typeof(BoolJsonConverter))] + [JsonConverter(typeof(OptionalBoolJsonConverter))] public bool? DoSample { get; set; } } diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs index dda1af38a596..95bcaf3ce29c 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs @@ -317,6 +317,133 @@ public void ItRestoresOriginalFunctionChoiceBehavior() Assert.Equal(functionChoiceBehavior, result.FunctionChoiceBehavior); } + [Fact] + public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettings() + { + // Arrange + PromptExecutionSettings originalSettings = new() + { + ExtensionData = new Dictionary() + { + { "temperature", 0.7 }, + { "top_p", 0.7 }, + { "frequency_penalty", 0.7 }, + { "presence_penalty", 0.7 }, + { "stop_sequences", new string[] { "foo", "bar" } }, + { "chat_system_prompt", "chat system prompt" }, + { "chat_developer_prompt", "chat developer prompt" }, + { "reasoning_effort", "high" }, + { "token_selection_biases", new Dictionary() { { 1, 2 }, { 3, 4 } } }, + { "max_tokens", 128 }, + { "logprobs", true }, + { "seed", 123456 }, + { "store", true }, + { "top_logprobs", 5 }, + { "metadata", new Dictionary() { { "foo", "bar" } } } + } + }; + + // Act + OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Fact] + public void ItCanCreateOpenAIPromptExecutionSettingsFromJson() + { + // Arrange + var json = + """ + { + "temperature": 0.7, + "top_p": 0.7, + "frequency_penalty": 0.7, + "presence_penalty": 0.7, + "stop_sequences": [ "foo", "bar" ], + "chat_system_prompt": "chat system prompt", + "chat_developer_prompt": "chat developer prompt", + "reasoning_effort": "high", + "token_selection_biases": + { + "1": "2", + "3": "4" + }, + "max_tokens": 128, + "logprobs": true, + "seed": 123456, + "store": true, + "top_logprobs": 5, + "metadata": + { + "foo": "bar" + } + } + """; + + // Act + var originalSettings = JsonSerializer.Deserialize(json); + OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Fact] + public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettingsWithIncorrectTypes() + { + // Arrange + PromptExecutionSettings originalSettings = new() + { + ExtensionData = new Dictionary() + { + { "temperature", "0.7" }, + { "top_p", "0.7" }, + { "frequency_penalty", "0.7" }, + { "presence_penalty", "0.7" }, + { "stop_sequences", new List { "foo", "bar" } }, + { "chat_system_prompt", "chat system prompt" }, + { "chat_developer_prompt", "chat developer prompt" }, + { "reasoning_effort", "high" }, + { "token_selection_biases", new Dictionary() { { "1", "2" 
}, { "3", "4" } } }, + { "max_tokens", "128" }, + { "logprobs", "true" }, + { "seed", "123456" }, + { "store", true }, + { "top_logprobs", "5" }, + { "metadata", new Dictionary() { { "foo", "bar" } } } + } + }; + + // Act + OpenAIPromptExecutionSettings executionSettings = OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings); + + // Assert + AssertExecutionSettings(executionSettings); + } + + [Theory] + [InlineData("")] + [InlineData("123")] + [InlineData("Foo")] + [InlineData(1)] + [InlineData(1.0)] + public void ItCannotCreateOpenAIPromptExecutionSettingsWithInvalidBoolValues(object value) + { + // Arrange + PromptExecutionSettings originalSettings = new() + { + ExtensionData = new Dictionary() + { + { "logprobs", value } + } + }; + + // Act & Assert + Assert.Throws(() => OpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings)); + } + private static void AssertExecutionSettings(OpenAIPromptExecutionSettings executionSettings) { Assert.NotNull(executionSettings); diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs index efedaaeaad48..355d16eb7c7f 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI/Settings/OpenAIPromptExecutionSettings.cs @@ -304,7 +304,7 @@ public string? User [Experimental("SKEXP0010")] [JsonPropertyName("logprobs")] [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - [JsonConverter(typeof(BoolJsonConverter))] + [JsonConverter(typeof(OptionalBoolJsonConverter))] public bool? Logprobs { get => this._logprobs; @@ -356,7 +356,7 @@ public IDictionary? Metadata [Experimental("SKEXP0010")] [JsonPropertyName("store")] [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - [JsonConverter(typeof(BoolJsonConverter))] + [JsonConverter(typeof(OptionalBoolJsonConverter))] public bool? Store { get => this._store; diff --git a/dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs b/dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs index bba4a88b6304..a4b1787afc3b 100644 --- a/dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs +++ b/dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs @@ -13,17 +13,17 @@ namespace Microsoft.SemanticKernel.Text; /// Serializing a instance without this converter will throw a 'System.Text.Json.JsonException : The JSON value could not be converted to System.Nullable' /// if there are any bool properties. /// -internal sealed class BoolJsonConverter : JsonConverter +internal sealed class BoolJsonConverter : JsonConverter { /// - public override bool? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + public override bool Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) { if (reader.TokenType == JsonTokenType.String) { string? value = reader.GetString(); if (value is null) { - return null; + return false; } if (value.Equals("true", StringComparison.OrdinalIgnoreCase)) { @@ -49,15 +49,8 @@ internal sealed class BoolJsonConverter : JsonConverter } /// - public override void Write(Utf8JsonWriter writer, bool? 
value, JsonSerializerOptions options) + public override void Write(Utf8JsonWriter writer, bool value, JsonSerializerOptions options) { - if (value is null) - { - writer.WriteNullValue(); - } - else - { - writer.WriteBooleanValue((bool)value); - } + writer.WriteBooleanValue((bool)value); } } diff --git a/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs b/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs new file mode 100644 index 000000000000..acc216bd8d79 --- /dev/null +++ b/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace Microsoft.SemanticKernel.Text; + +#pragma warning disable CA1812 // Instantiated via JsonConverterAttribute + +/// +/// Deserializes a bool from a string. This is useful when deserializing a instance that contains bool properties. +/// Serializing a instance without this converter will throw a 'System.Text.Json.JsonException : The JSON value could not be converted to System.Nullable' +/// if there are any bool properties. +/// +internal sealed class OptionalBoolJsonConverter : JsonConverter +{ + /// + public override bool? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + if (reader.TokenType == JsonTokenType.String) + { + string? value = reader.GetString(); + if (value is null) + { + return null; + } + if (value.Equals("true", StringComparison.OrdinalIgnoreCase)) + { + return true; + } + else if (value.Equals("false", StringComparison.OrdinalIgnoreCase)) + { + return false; + } + + throw new ArgumentException($"Value '{value}' can be parsed as a boolean value"); + } + else if (reader.TokenType == JsonTokenType.True) + { + return true; + } + else if (reader.TokenType == JsonTokenType.False) + { + return false; + } + + throw new ArgumentException($"Invalid token type found '{reader.TokenType}', expected a boolean value."); + } + + /// + public override void Write(Utf8JsonWriter writer, bool? 
value, JsonSerializerOptions options) + { + if (value is null) + { + writer.WriteNullValue(); + } + else + { + writer.WriteBooleanValue((bool)value); + } + } +} From f6fdb38f3fac062cd92044596da444b900edeb4a Mon Sep 17 00:00:00 2001 From: markwallace-microsoft <127216156+markwallace-microsoft@users.noreply.github.com> Date: Wed, 2 Apr 2025 17:47:50 +0100 Subject: [PATCH 4/6] Address code review feedback --- .../Settings/OpenAIPromptExecutionSettingsTests.cs | 3 +++ .../src/Functions/Functions.Prompty.UnitTests/PromptyTests.cs | 2 +- .../InternalUtilities/src/Text/OptionalBoolJsonConverter.cs | 4 ++++ 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs index 9423ad6fee14..0d5b7674fc04 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs @@ -451,6 +451,9 @@ public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettingsW [Theory] [InlineData("")] + [InlineData(" true ")] + [InlineData("true ")] + [InlineData(" true")] [InlineData("123")] [InlineData("Foo")] [InlineData(1)] diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTests.cs b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTests.cs index 82d991d07c61..62718d07a128 100644 --- a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTests.cs +++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTests.cs @@ -370,7 +370,7 @@ public void ItShouldLoadExecutionSettings() configuration: type: azure_openai_beta parameters: - logprobs: !!bool true + logprobs: true top_logprobs: 2 top_p: 1.0 user: Bob diff --git a/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs b/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs index 46f900ab3603..3f67b3bcab1f 100644 --- a/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs +++ b/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs @@ -46,6 +46,10 @@ internal sealed class OptionalBoolJsonConverter : JsonConverter { return false; } + else if (reader.TokenType == JsonTokenType.Null) + { + return null; + } throw new ArgumentException($"Invalid token type found '{reader.TokenType}', expected a boolean value."); } From 27593f7632489059a417fa02e9d8a65f486c6e03 Mon Sep 17 00:00:00 2001 From: markwallace-microsoft <127216156+markwallace-microsoft@users.noreply.github.com> Date: Wed, 2 Apr 2025 17:47:50 +0100 Subject: [PATCH 5/6] Address code review feedback --- dotnet/SK-dotnet.sln | 1 + .../Settings/OpenAIPromptExecutionSettingsTests.cs | 3 +++ .../Functions/Functions.Prompty.UnitTests/PromptyTests.cs | 2 +- dotnet/src/Functions/Functions.Prompty/AssemblyInfo.cs | 6 ++++++ .../InternalUtilities/src/Text/OptionalBoolJsonConverter.cs | 4 ++++ .../AI/PromptExecutionSettings.cs | 1 + 6 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 dotnet/src/Functions/Functions.Prompty/AssemblyInfo.cs diff --git a/dotnet/SK-dotnet.sln b/dotnet/SK-dotnet.sln index 970a7ef951f9..9b96d8d2a518 100644 --- a/dotnet/SK-dotnet.sln +++ b/dotnet/SK-dotnet.sln @@ -228,6 +228,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Text", "Text", "{EB2C141A-A src\InternalUtilities\src\Text\ExceptionJsonConverter.cs = 
src\InternalUtilities\src\Text\ExceptionJsonConverter.cs src\InternalUtilities\src\Text\JsonOptionsCache.cs = src\InternalUtilities\src\Text\JsonOptionsCache.cs OptionalBoolJsonConverter.cs = OptionalBoolJsonConverter.cs + src\InternalUtilities\src\Text\OptionalBoolJsonConverter.cs = src\InternalUtilities\src\Text\OptionalBoolJsonConverter.cs src\InternalUtilities\src\Text\SseData.cs = src\InternalUtilities\src\Text\SseData.cs src\InternalUtilities\src\Text\SseJsonParser.cs = src\InternalUtilities\src\Text\SseJsonParser.cs src\InternalUtilities\src\Text\SseLine.cs = src\InternalUtilities\src\Text\SseLine.cs diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs index 9423ad6fee14..0d5b7674fc04 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs @@ -451,6 +451,9 @@ public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettingsW [Theory] [InlineData("")] + [InlineData(" true ")] + [InlineData("true ")] + [InlineData(" true")] [InlineData("123")] [InlineData("Foo")] [InlineData(1)] diff --git a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTests.cs b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTests.cs index 82d991d07c61..62718d07a128 100644 --- a/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTests.cs +++ b/dotnet/src/Functions/Functions.Prompty.UnitTests/PromptyTests.cs @@ -370,7 +370,7 @@ public void ItShouldLoadExecutionSettings() configuration: type: azure_openai_beta parameters: - logprobs: !!bool true + logprobs: true top_logprobs: 2 top_p: 1.0 user: Bob diff --git a/dotnet/src/Functions/Functions.Prompty/AssemblyInfo.cs b/dotnet/src/Functions/Functions.Prompty/AssemblyInfo.cs new file mode 100644 index 000000000000..a7534ccf9f38 --- /dev/null +++ b/dotnet/src/Functions/Functions.Prompty/AssemblyInfo.cs @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +// This assembly is currently experimental. +[assembly: Experimental("SKEXP0040")] diff --git a/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs b/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs index 46f900ab3603..3f67b3bcab1f 100644 --- a/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs +++ b/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs @@ -46,6 +46,10 @@ internal sealed class OptionalBoolJsonConverter : JsonConverter { return false; } + else if (reader.TokenType == JsonTokenType.Null) + { + return null; + } throw new ArgumentException($"Invalid token type found '{reader.TokenType}', expected a boolean value."); } diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs index b767c478b1bc..168f74219725 100644 --- a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs +++ b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs @@ -35,6 +35,7 @@ public class PromptExecutionSettings /// When provided, this service identifier will be the key in a dictionary collection of execution settings for both and . /// If not provided the service identifier will be the default value in . 
/// + [Experimental("SKEXP0001")] [JsonPropertyName("service_id")] public string? ServiceId { From 15a1fe9396f9ae57a65a429e17fa7585d2ba0847 Mon Sep 17 00:00:00 2001 From: markwallace-microsoft <127216156+markwallace-microsoft@users.noreply.github.com> Date: Thu, 10 Apr 2025 10:41:31 +0100 Subject: [PATCH 6/6] Address code review feedback --- .../Settings/OpenAIPromptExecutionSettingsTests.cs | 3 --- .../InternalUtilities/src/Text/BoolJsonConverter.cs | 10 +++------- .../src/Text/OptionalBoolJsonConverter.cs | 8 ++++++-- 3 files changed, 9 insertions(+), 12 deletions(-) diff --git a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs index 0d5b7674fc04..9423ad6fee14 100644 --- a/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs +++ b/dotnet/src/Connectors/Connectors.OpenAI.UnitTests/Settings/OpenAIPromptExecutionSettingsTests.cs @@ -451,9 +451,6 @@ public void ItCanCreateOpenAIPromptExecutionSettingsFromPromptExecutionSettingsW [Theory] [InlineData("")] - [InlineData(" true ")] - [InlineData("true ")] - [InlineData(" true")] [InlineData("123")] [InlineData("Foo")] [InlineData(1)] diff --git a/dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs b/dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs index f9e4ca21a3ad..a3c356dc4ab3 100644 --- a/dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs +++ b/dotnet/src/InternalUtilities/src/Text/BoolJsonConverter.cs @@ -27,13 +27,9 @@ public override bool Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSer { return false; } - if (value.Equals("true", StringComparison.OrdinalIgnoreCase)) + if (bool.TryParse(value, out var boolValue)) { - return true; - } - else if (value.Equals("false", StringComparison.OrdinalIgnoreCase)) - { - return false; + return boolValue; } throw new ArgumentException($"Value '{value}' can be parsed as a boolean value"); @@ -53,6 +49,6 @@ public override bool Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSer /// public override void Write(Utf8JsonWriter writer, bool value, JsonSerializerOptions options) { - writer.WriteBooleanValue((bool)value); + writer.WriteBooleanValue(value); } } diff --git a/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs b/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs index 3f67b3bcab1f..ec0d0362c907 100644 --- a/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs +++ b/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs @@ -27,11 +27,15 @@ internal sealed class OptionalBoolJsonConverter : JsonConverter { return null; } - if (value.Equals("true", StringComparison.OrdinalIgnoreCase)) + if (bool.TryParse(value, out var boolValue)) + { + return boolValue; + } + if (value.Equals(bool.TrueString, StringComparison.OrdinalIgnoreCase)) { return true; } - else if (value.Equals("false", StringComparison.OrdinalIgnoreCase)) + else if (value.Equals(bool.FalseString, StringComparison.OrdinalIgnoreCase)) { return false; }
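
For readers following the diff, a minimal, self-contained C# sketch of the converter pattern these commits introduce is shown below. It mirrors the behavior of the OptionalBoolJsonConverter added under dotnet/src/InternalUtilities/src/Text, but it is only an illustration: the OptionalBoolConverter, DemoSettings, and Program names are assumptions made for the example, not Semantic Kernel types.

// Minimal, self-contained sketch of the converter pattern these commits introduce.
// It mirrors OptionalBoolJsonConverter but is NOT the Semantic Kernel source; the
// OptionalBoolConverter, DemoSettings, and Program names below are illustrative only.
using System;
using System.Text.Json;
using System.Text.Json.Serialization;

internal sealed class OptionalBoolConverter : JsonConverter<bool?>
{
    public override bool? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) =>
        reader.TokenType switch
        {
            JsonTokenType.True => true,
            JsonTokenType.False => false,
            JsonTokenType.Null => null,
            // Accept "true"/"false" strings, which is how values arrive when settings
            // originate from YAML front matter or loosely typed extension data.
            JsonTokenType.String when bool.TryParse(reader.GetString(), out bool parsed) => parsed,
            _ => throw new ArgumentException($"Token '{reader.TokenType}' cannot be converted to a boolean value.")
        };

    public override void Write(Utf8JsonWriter writer, bool? value, JsonSerializerOptions options)
    {
        if (value is null)
        {
            writer.WriteNullValue();
        }
        else
        {
            writer.WriteBooleanValue(value.Value);
        }
    }
}

internal sealed class DemoSettings
{
    [JsonPropertyName("logprobs")]
    [JsonConverter(typeof(OptionalBoolConverter))]
    public bool? Logprobs { get; set; }
}

internal static class Program
{
    private static void Main()
    {
        // Both a JSON boolean and a string-encoded boolean deserialize successfully,
        // while a non-boolean value still fails fast with an ArgumentException.
        DemoSettings? fromBool = JsonSerializer.Deserialize<DemoSettings>("{\"logprobs\": true}");
        DemoSettings? fromString = JsonSerializer.Deserialize<DemoSettings>("{\"logprobs\": \"true\"}");
        Console.WriteLine($"{fromBool!.Logprobs} / {fromString!.Logprobs}"); // True / True
    }
}

In the patch itself, the equivalent converter is attached to nullable boolean settings properties (for example logprobs and store on OpenAIPromptExecutionSettings) via [JsonConverter(typeof(OptionalBoolJsonConverter))], which is what lets OpenAIPromptExecutionSettings.FromExecutionSettings accept extension data where booleans arrive as the strings "true" or "false".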