Skip to content

.Net: Add JsonConverters to handle bool values when converting from PromptExecutionSettings #11336

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
16 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions dotnet/SK-dotnet.sln
Original file line number Diff line number Diff line change
Expand Up @@ -224,8 +224,10 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Functions.OpenApi.Extension
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Text", "Text", "{EB2C141A-AE5F-4080-8790-13EB16323CEF}"
ProjectSection(SolutionItems) = preProject
src\InternalUtilities\src\Text\BoolJsonConverter.cs = src\InternalUtilities\src\Text\BoolJsonConverter.cs
src\InternalUtilities\src\Text\ExceptionJsonConverter.cs = src\InternalUtilities\src\Text\ExceptionJsonConverter.cs
src\InternalUtilities\src\Text\JsonOptionsCache.cs = src\InternalUtilities\src\Text\JsonOptionsCache.cs
src\InternalUtilities\src\Text\OptionalBoolJsonConverter.cs = src\InternalUtilities\src\Text\OptionalBoolJsonConverter.cs
src\InternalUtilities\src\Text\SseData.cs = src\InternalUtilities\src\Text\SseData.cs
src\InternalUtilities\src\Text\SseJsonParser.cs = src\InternalUtilities\src\Text\SseJsonParser.cs
src\InternalUtilities\src\Text\SseLine.cs = src\InternalUtilities\src\Text\SseLine.cs
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,7 @@ public string? ReturnLikelihoods
/// (Required to support streaming) Specify true to return the response piece-by-piece in real-time and false to return the complete response after the process finishes.
/// </summary>
[JsonPropertyName("stream")]
[JsonConverter(typeof(OptionalBoolJsonConverter))]
public bool? Stream
{
get => this._stream;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@ public List<CohereCommandRTools.Document>? Documents
/// Defaults to false. When true, the response will only contain a list of generated search queries, but no search will take place, and no reply from the model to the user's message will be generated.
/// </summary>
[JsonPropertyName("search_queries_only")]
[JsonConverter(typeof(OptionalBoolJsonConverter))]
public bool? SearchQueriesOnly
{
get => this._searchQueriesOnly;
Expand Down Expand Up @@ -203,6 +204,7 @@ public int? Seed
/// Specify true to return the full prompt that was sent to the model. The default value is false. In the response, the prompt in the prompt field.
/// </summary>
[JsonPropertyName("return_prompt")]
[JsonConverter(typeof(OptionalBoolJsonConverter))]
public bool? ReturnPrompt
{
get => this._returnPrompt;
Expand Down Expand Up @@ -259,6 +261,7 @@ public List<string>? StopSequences
/// Specify true, to send the user's message to the model without any preprocessing, otherwise false.
/// </summary>
[JsonPropertyName("raw_prompting")]
[JsonConverter(typeof(OptionalBoolJsonConverter))]
public bool? RawPrompting
{
get => this._rawPrompting;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

using System;
using System.Collections.Generic;
using System.Text.Json;
using Azure.AI.OpenAI.Chat;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
Expand Down Expand Up @@ -64,6 +65,119 @@ public void ItRestoresOriginalFunctionChoiceBehavior()
Assert.Equal(functionChoiceBehavior, result.FunctionChoiceBehavior);
}

[Fact]
public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromPromptExecutionSettings()
{
    // Arrange: a generic PromptExecutionSettings whose values are all carried
    // in ExtensionData, using the correctly-typed representation of each value.
    PromptExecutionSettings settings = new()
    {
        ExtensionData = new Dictionary<string, object>()
        {
            ["temperature"] = 0.7,
            ["top_p"] = 0.7,
            ["frequency_penalty"] = 0.7,
            ["presence_penalty"] = 0.7,
            ["stop_sequences"] = new string[] { "foo", "bar" },
            ["chat_system_prompt"] = "chat system prompt",
            ["token_selection_biases"] = new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } },
            ["max_tokens"] = 128,
            ["logprobs"] = true,
            ["seed"] = 123456,
            ["top_logprobs"] = 5,
        }
    };

    // Act: convert to the connector-specific settings type.
    var executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(settings);

    // Assert: every value must have been mapped onto the strongly-typed properties.
    AssertExecutionSettings(executionSettings);
}

[Fact]
public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromJson()
{
    // Arrange: the same settings values as the strongly-typed tests, but supplied
    // as raw JSON and round-tripped through PromptExecutionSettings deserialization.
    var json =
        """
        {
            "temperature": 0.7,
            "top_p": 0.7,
            "frequency_penalty": 0.7,
            "presence_penalty": 0.7,
            "stop_sequences": [ "foo", "bar" ],
            "chat_system_prompt": "chat system prompt",
            "token_selection_biases":
            {
                "1": "2",
                "3": "4"
            },
            "max_tokens": 128,
            "logprobs": true,
            "seed": 123456,
            "top_logprobs": 5
        }
        """;

    // Act
    var originalSettings = JsonSerializer.Deserialize<PromptExecutionSettings>(json);
    // Declared as AzureOpenAIPromptExecutionSettings for consistency with the sibling
    // tests (the factory's return type, as shown by the other tests' assignments);
    // previously this was typed as the base OpenAIPromptExecutionSettings.
    AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(originalSettings);

    // Assert
    AssertExecutionSettings(executionSettings);
}

[Fact]
public void ItCanCreateAzureOpenAIPromptExecutionSettingsFromPromptExecutionSettingsWithIncorrectTypes()
{
    // Arrange: every value deliberately uses a loosely-typed representation
    // (strings, object collections) to exercise the converters' coercion paths.
    var extensionData = new Dictionary<string, object>()
    {
        { "temperature", "0.7" },
        { "top_p", "0.7" },
        { "frequency_penalty", "0.7" },
        { "presence_penalty", "0.7" },
        { "stop_sequences", new List<object> { "foo", "bar" } },
        { "chat_system_prompt", "chat system prompt" },
        { "token_selection_biases", new Dictionary<string, object>() { { "1", "2" }, { "3", "4" } } },
        { "max_tokens", "128" },
        { "logprobs", "true" },
        { "seed", "123456" },
        { "top_logprobs", "5" },
    };
    PromptExecutionSettings sourceSettings = new() { ExtensionData = extensionData };

    // Act
    AzureOpenAIPromptExecutionSettings executionSettings = AzureOpenAIPromptExecutionSettings.FromExecutionSettings(sourceSettings);

    // Assert: the loosely-typed values must land in the same strongly-typed properties.
    AssertExecutionSettings(executionSettings);
}

[Theory]
[InlineData("")]
[InlineData("123")]
[InlineData("Foo")]
[InlineData(1)]
[InlineData(1.0)]
public void ItCannotCreateAzureOpenAIPromptExecutionSettingsWithInvalidBoolValues(object value)
{
    // Arrange: a bool-typed setting ("logprobs") supplied with a value
    // that cannot be interpreted as a boolean.
    PromptExecutionSettings settings = new()
    {
        ExtensionData = new Dictionary<string, object>() { ["logprobs"] = value }
    };

    // Act & Assert: conversion must be rejected rather than silently coerced.
    Assert.Throws<ArgumentException>(() => AzureOpenAIPromptExecutionSettings.FromExecutionSettings(settings));
}

#region private
private static void AssertExecutionSettings(OpenAIPromptExecutionSettings executionSettings)
{
Assert.NotNull(executionSettings);
Expand All @@ -79,4 +193,5 @@ private static void AssertExecutionSettings(OpenAIPromptExecutionSettings execut
Assert.Equal(true, executionSettings.Logprobs);
Assert.Equal(5, executionSettings.TopLogprobs);
}
#endregion
}
Original file line number Diff line number Diff line change
Expand Up @@ -189,6 +189,7 @@ public GeminiToolCallBehavior? ToolCallBehavior
/// </summary>
[JsonPropertyName("audio_timestamp")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
[JsonConverter(typeof(OptionalBoolJsonConverter))]
public bool? AudioTimestamp
{
get => this._audioTimestamp;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -183,6 +183,7 @@ public bool UseCache
/// This may not be supported by all models/inference API.
/// </remarks>
[JsonPropertyName("wait_for_model")]
[JsonConverter(typeof(BoolJsonConverter))]
public bool WaitForModel
{
get => this._waitForModel;
Expand Down Expand Up @@ -233,6 +234,7 @@ public float? PresencePenalty
/// output token returned in the content of message.
/// </summary>
[JsonPropertyName("logprobs")]
[JsonConverter(typeof(OptionalBoolJsonConverter))]
public bool? LogProbs
{
get => this._logProbs;
Expand Down Expand Up @@ -294,6 +296,7 @@ public int? TopLogProbs
/// (Default: True). Bool. If set to False, the return results will not contain the original query making it easier for prompting.
/// </summary>
[JsonPropertyName("return_full_text")]
[JsonConverter(typeof(OptionalBoolJsonConverter))]
public bool? ReturnFullText
{
get => this._returnFullText;
Expand All @@ -309,6 +312,7 @@ public bool? ReturnFullText
/// (Optional: True). Bool. Whether or not to use sampling, use greedy decoding otherwise.
/// </summary>
[JsonPropertyName("do_sample")]
[JsonConverter(typeof(OptionalBoolJsonConverter))]
public bool? DoSample
{
get => this._doSample;
Expand All @@ -323,6 +327,8 @@ public bool? DoSample
/// <summary>
/// Show details of the generation. Including usage.
/// </summary>
[JsonPropertyName("details")]
[JsonConverter(typeof(OptionalBoolJsonConverter))]
public bool? Details
{
get => this._details;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@ public int? MaxTokens
/// Whether to inject a safety prompt before all conversations.
/// </summary>
[JsonPropertyName("safe_prompt")]
[JsonConverter(typeof(BoolJsonConverter))]
public bool SafePrompt
{
get => this._safePrompt;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
<Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/Diagnostics/RequiresDynamicCodeAttribute.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
<Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/Diagnostics/UnconditionalSuppressMessageAttribute.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
<Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/Text/JsonOptionsCache.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
<Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/Text/OptionalBoolJsonConverter.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
<Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/Text/ExceptionJsonConverter.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
<Compile Include="$(RepoRoot)/dotnet/src/InternalUtilities/src/System/AppContextSwitchHelper.cs" Link="%(RecursiveDir)%(Filename)%(Extension)" />
</ItemGroup>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,7 @@ public static OnnxRuntimeGenAIPromptExecutionSettings FromExecutionSettings(Prom
/// The past/present kv tensors are shared and allocated once to max_length (cuda only)
/// </summary>
[JsonPropertyName("past_present_share_buffer")]
[JsonConverter(typeof(OptionalBoolJsonConverter))]
public bool? PastPresentShareBuffer { get; set; }

/// <summary>
Expand Down Expand Up @@ -139,11 +140,13 @@ public static OnnxRuntimeGenAIPromptExecutionSettings FromExecutionSettings(Prom
/// Allows the generation to stop early if all beam candidates reach the end token
/// </summary>
[JsonPropertyName("early_stopping")]
[JsonConverter(typeof(OptionalBoolJsonConverter))]
public bool? EarlyStopping { get; set; }

/// <summary>
/// Do random sampling
/// </summary>
[JsonPropertyName("do_sample")]
[JsonConverter(typeof(OptionalBoolJsonConverter))]
public bool? DoSample { get; set; }
}
Loading
Loading