From c7d2af6c76d41c6d806cc233de3c3c8b33420c40 Mon Sep 17 00:00:00 2001
From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com>
Date: Thu, 16 May 2024 11:25:13 -0700
Subject: [PATCH 1/3] Added logprobs property to OpenAIPromptExecutionSettings
---
.../Connectors.OpenAI/AzureSdk/ClientCore.cs | 6 ++-
.../OpenAIPromptExecutionSettings.cs | 39 ++++++++++++++++++-
.../AzureOpenAIChatCompletionServiceTests.cs | 6 ++-
.../OpenAIPromptExecutionSettingsTests.cs | 20 ++++++++--
.../AzureOpenAITextGenerationServiceTests.cs | 4 +-
5 files changed, 67 insertions(+), 8 deletions(-)
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs
index 1b4a6389116a..e1372db6366a 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs
@@ -1036,7 +1036,7 @@ private static CompletionsOptions CreateCompletionsOptions(string text, OpenAIPr
Echo = false,
ChoicesPerPrompt = executionSettings.ResultsPerPrompt,
GenerationSampleCount = executionSettings.ResultsPerPrompt,
- LogProbabilityCount = null,
+ LogProbabilityCount = executionSettings.LogProbabilitiesCount,
User = executionSettings.User,
DeploymentName = deploymentOrModelName
};
@@ -1081,7 +1081,9 @@ private static ChatCompletionsOptions CreateChatCompletionsOptions(
ChoiceCount = executionSettings.ResultsPerPrompt,
DeploymentName = deploymentOrModelName,
Seed = executionSettings.Seed,
- User = executionSettings.User
+ User = executionSettings.User,
+ LogProbabilitiesPerToken = executionSettings.LogProbabilitiesCount,
+ EnableLogProbabilities = executionSettings.EnableLogProbabilities
};
switch (executionSettings.ResponseFormat)
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs
index f88cb18b7950..e14c41094dc3 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs
@@ -254,6 +254,39 @@ public string? User
}
}
+ /// <summary>
+ /// Whether to return log probabilities of the output tokens or not.
+ /// If true, returns the log probabilities of each output token returned in the `content` of `message`.
+ /// </summary>
+ [Experimental("SKEXP0010")]
+ [JsonPropertyName("logprobs")]
+ public bool? EnableLogProbabilities
+ {
+ get => this._enableLogProbabilities;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._enableLogProbabilities = value;
+ }
+ }
+
+ /// <summary>
+ /// An integer specifying the number of most likely tokens to return at each token position, each with an associated log probability.
+ /// </summary>
+ [Experimental("SKEXP0010")]
+ [JsonPropertyName("top_logprobs")]
+ public int? LogProbabilitiesCount
+ {
+ get => this._logProbabilitiesCount;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._logProbabilitiesCount = value;
+ }
+ }
+
/// <inheritdoc/>
public override void Freeze()
{
@@ -294,7 +327,9 @@ public override PromptExecutionSettings Clone()
TokenSelectionBiases = this.TokenSelectionBiases is not null ? new Dictionary<int, int>(this.TokenSelectionBiases) : null,
ToolCallBehavior = this.ToolCallBehavior,
User = this.User,
- ChatSystemPrompt = this.ChatSystemPrompt
+ ChatSystemPrompt = this.ChatSystemPrompt,
+ EnableLogProbabilities = this.EnableLogProbabilities,
+ LogProbabilitiesCount = this.LogProbabilitiesCount
};
}
@@ -370,6 +405,8 @@ public static OpenAIPromptExecutionSettings FromExecutionSettingsWithData(Prompt
private ToolCallBehavior? _toolCallBehavior;
private string? _user;
private string? _chatSystemPrompt;
+ private bool? _enableLogProbabilities;
+ private int? _logProbabilitiesCount;
#endregion
}
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs
index c8d6c0de5f40..83e3e4ec5ab4 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs
@@ -161,7 +161,9 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync()
ResultsPerPrompt = 5,
Seed = 567,
TokenSelectionBiases = new Dictionary<int, int> { { 2, 3 } },
- StopSequences = ["stop_sequence"]
+ StopSequences = ["stop_sequence"],
+ EnableLogProbabilities = true,
+ LogProbabilitiesCount = 5
};
var chatHistory = new ChatHistory();
@@ -218,6 +220,8 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync()
Assert.Equal(567, content.GetProperty("seed").GetInt32());
Assert.Equal(3, content.GetProperty("logit_bias").GetProperty("2").GetInt32());
Assert.Equal("stop_sequence", content.GetProperty("stop")[0].GetString());
+ Assert.True(content.GetProperty("logprobs").GetBoolean());
+ Assert.Equal(5, content.GetProperty("top_logprobs").GetInt32());
}
[Theory]
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs
index 6def578e8821..5d3e86f9b363 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs
+++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs
@@ -30,6 +30,8 @@ public void ItCreatesOpenAIExecutionSettingsWithCorrectDefaults()
Assert.Equal(1, executionSettings.ResultsPerPrompt);
Assert.Null(executionSettings.StopSequences);
Assert.Null(executionSettings.TokenSelectionBiases);
+ Assert.Null(executionSettings.LogProbabilitiesCount);
+ Assert.Null(executionSettings.EnableLogProbabilities);
Assert.Equal(128, executionSettings.MaxTokens);
}
@@ -47,6 +49,8 @@ public void ItUsesExistingOpenAIExecutionSettings()
StopSequences = new string[] { "foo", "bar" },
ChatSystemPrompt = "chat system prompt",
MaxTokens = 128,
+ EnableLogProbabilities = true,
+ LogProbabilitiesCount = 5,
TokenSelectionBiases = new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } },
};
@@ -97,6 +101,8 @@ public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesSnakeCase()
{ "max_tokens", 128 },
{ "token_selection_biases", new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } } },
{ "seed", 123456 },
+ { "logprobs", true },
+ { "top_logprobs", 5 },
}
};
@@ -105,7 +111,6 @@ public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesSnakeCase()
// Assert
AssertExecutionSettings(executionSettings);
- Assert.Equal(executionSettings.Seed, 123456);
}
[Fact]
@@ -124,7 +129,10 @@ public void ItCreatesOpenAIExecutionSettingsFromExtraPropertiesAsStrings()
{ "stop_sequences", new [] { "foo", "bar" } },
{ "chat_system_prompt", "chat system prompt" },
{ "max_tokens", "128" },
- { "token_selection_biases", new Dictionary<string, string>() { { "1", "2" }, { "3", "4" } } }
+ { "token_selection_biases", new Dictionary<string, string>() { { "1", "2" }, { "3", "4" } } },
+ { "seed", 123456 },
+ { "logprobs", true },
+ { "top_logprobs", 5 }
}
};
@@ -149,7 +157,10 @@ public void ItCreatesOpenAIExecutionSettingsFromJsonSnakeCase()
"stop_sequences": [ "foo", "bar" ],
"chat_system_prompt": "chat system prompt",
"token_selection_biases": { "1": 2, "3": 4 },
- "max_tokens": 128
+ "max_tokens": 128,
+ "seed": 123456,
+ "logprobs": true,
+ "top_logprobs": 5
}
""";
var actualSettings = JsonSerializer.Deserialize<OpenAIPromptExecutionSettings>(json);
@@ -255,5 +266,8 @@ private static void AssertExecutionSettings(OpenAIPromptExecutionSettings execut
Assert.Equal("chat system prompt", executionSettings.ChatSystemPrompt);
Assert.Equal(new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } }, executionSettings.TokenSelectionBiases);
Assert.Equal(128, executionSettings.MaxTokens);
+ Assert.Equal(123456, executionSettings.Seed);
+ Assert.Equal(true, executionSettings.EnableLogProbabilities);
+ Assert.Equal(5, executionSettings.LogProbabilitiesCount);
}
}
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs
index 87f5526d5f83..003896a313b4 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs
@@ -126,7 +126,8 @@ public async Task GetTextContentsHandlesSettingsCorrectlyAsync()
PresencePenalty = 1.2,
ResultsPerPrompt = 5,
TokenSelectionBiases = new Dictionary<int, int> { { 2, 3 } },
- StopSequences = ["stop_sequence"]
+ StopSequences = ["stop_sequence"],
+ LogProbabilitiesCount = 5
};
this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK)
@@ -154,6 +155,7 @@ public async Task GetTextContentsHandlesSettingsCorrectlyAsync()
Assert.Equal(5, content.GetProperty("best_of").GetInt32());
Assert.Equal(3, content.GetProperty("logit_bias").GetProperty("2").GetInt32());
Assert.Equal("stop_sequence", content.GetProperty("stop")[0].GetString());
+ Assert.Equal(5, content.GetProperty("logprobs").GetInt32());
}
[Fact]
From 2cd614b4d1de7bd7c6e00909c35ebf682f73a4fa Mon Sep 17 00:00:00 2001
From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com>
Date: Thu, 16 May 2024 13:02:21 -0700
Subject: [PATCH 2/3] Renamed properties
---
.../Connectors.OpenAI/AzureSdk/ClientCore.cs | 6 +++---
.../OpenAIPromptExecutionSettings.cs | 20 +++++++++----------
.../AzureOpenAIChatCompletionServiceTests.cs | 4 ++--
.../OpenAIPromptExecutionSettingsTests.cs | 12 +++++------
.../AzureOpenAITextGenerationServiceTests.cs | 2 +-
5 files changed, 22 insertions(+), 22 deletions(-)
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs
index e1372db6366a..e9a96df526ca 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs
@@ -1036,7 +1036,7 @@ private static CompletionsOptions CreateCompletionsOptions(string text, OpenAIPr
Echo = false,
ChoicesPerPrompt = executionSettings.ResultsPerPrompt,
GenerationSampleCount = executionSettings.ResultsPerPrompt,
- LogProbabilityCount = executionSettings.LogProbabilitiesCount,
+ LogProbabilityCount = executionSettings.TopLogprobs,
User = executionSettings.User,
DeploymentName = deploymentOrModelName
};
@@ -1082,8 +1082,8 @@ private static ChatCompletionsOptions CreateChatCompletionsOptions(
DeploymentName = deploymentOrModelName,
Seed = executionSettings.Seed,
User = executionSettings.User,
- LogProbabilitiesPerToken = executionSettings.LogProbabilitiesCount,
- EnableLogProbabilities = executionSettings.EnableLogProbabilities
+ LogProbabilitiesPerToken = executionSettings.TopLogprobs,
+ EnableLogProbabilities = executionSettings.Logprobs
};
switch (executionSettings.ResponseFormat)
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs
index e14c41094dc3..b4097b7020da 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/OpenAIPromptExecutionSettings.cs
@@ -260,14 +260,14 @@ public string? User
/// </summary>
[Experimental("SKEXP0010")]
[JsonPropertyName("logprobs")]
- public bool? EnableLogProbabilities
+ public bool? Logprobs
{
- get => this._enableLogProbabilities;
+ get => this._logprobs;
set
{
this.ThrowIfFrozen();
- this._enableLogProbabilities = value;
+ this._logprobs = value;
}
}
@@ -276,14 +276,14 @@ public bool? EnableLogProbabilities
/// </summary>
[Experimental("SKEXP0010")]
[JsonPropertyName("top_logprobs")]
- public int? LogProbabilitiesCount
+ public int? TopLogprobs
{
- get => this._logProbabilitiesCount;
+ get => this._topLogprobs;
set
{
this.ThrowIfFrozen();
- this._logProbabilitiesCount = value;
+ this._topLogprobs = value;
}
}
@@ -328,8 +328,8 @@ public override PromptExecutionSettings Clone()
ToolCallBehavior = this.ToolCallBehavior,
User = this.User,
ChatSystemPrompt = this.ChatSystemPrompt,
- EnableLogProbabilities = this.EnableLogProbabilities,
- LogProbabilitiesCount = this.LogProbabilitiesCount
+ Logprobs = this.Logprobs,
+ TopLogprobs = this.TopLogprobs
};
}
@@ -405,8 +405,8 @@ public static OpenAIPromptExecutionSettings FromExecutionSettingsWithData(Prompt
private ToolCallBehavior? _toolCallBehavior;
private string? _user;
private string? _chatSystemPrompt;
- private bool? _enableLogProbabilities;
- private int? _logProbabilitiesCount;
+ private bool? _logprobs;
+ private int? _topLogprobs;
#endregion
}
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs
index 83e3e4ec5ab4..159fcd7d852c 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/ChatCompletion/AzureOpenAIChatCompletionServiceTests.cs
@@ -162,8 +162,8 @@ public async Task GetChatMessageContentsHandlesSettingsCorrectlyAsync()
Seed = 567,
TokenSelectionBiases = new Dictionary<int, int> { { 2, 3 } },
StopSequences = ["stop_sequence"],
- EnableLogProbabilities = true,
- LogProbabilitiesCount = 5
+ Logprobs = true,
+ TopLogprobs = 5
};
var chatHistory = new ChatHistory();
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs
index 5d3e86f9b363..c951f821b348 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs
+++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/OpenAIPromptExecutionSettingsTests.cs
@@ -30,8 +30,8 @@ public void ItCreatesOpenAIExecutionSettingsWithCorrectDefaults()
Assert.Equal(1, executionSettings.ResultsPerPrompt);
Assert.Null(executionSettings.StopSequences);
Assert.Null(executionSettings.TokenSelectionBiases);
- Assert.Null(executionSettings.LogProbabilitiesCount);
- Assert.Null(executionSettings.EnableLogProbabilities);
+ Assert.Null(executionSettings.TopLogprobs);
+ Assert.Null(executionSettings.Logprobs);
Assert.Equal(128, executionSettings.MaxTokens);
}
@@ -49,8 +49,8 @@ public void ItUsesExistingOpenAIExecutionSettings()
StopSequences = new string[] { "foo", "bar" },
ChatSystemPrompt = "chat system prompt",
MaxTokens = 128,
- EnableLogProbabilities = true,
- LogProbabilitiesCount = 5,
+ Logprobs = true,
+ TopLogprobs = 5,
TokenSelectionBiases = new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } },
};
@@ -267,7 +267,7 @@ private static void AssertExecutionSettings(OpenAIPromptExecutionSettings execut
Assert.Equal(new Dictionary<int, int>() { { 1, 2 }, { 3, 4 } }, executionSettings.TokenSelectionBiases);
Assert.Equal(128, executionSettings.MaxTokens);
Assert.Equal(123456, executionSettings.Seed);
- Assert.Equal(true, executionSettings.EnableLogProbabilities);
- Assert.Equal(5, executionSettings.LogProbabilitiesCount);
+ Assert.Equal(true, executionSettings.Logprobs);
+ Assert.Equal(5, executionSettings.TopLogprobs);
}
}
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs
index 003896a313b4..d20bb502e23d 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextGeneration/AzureOpenAITextGenerationServiceTests.cs
@@ -127,7 +127,7 @@ public async Task GetTextContentsHandlesSettingsCorrectlyAsync()
ResultsPerPrompt = 5,
TokenSelectionBiases = new Dictionary<int, int> { { 2, 3 } },
StopSequences = ["stop_sequence"],
- LogProbabilitiesCount = 5
+ TopLogprobs = 5
};
this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(HttpStatusCode.OK)
From f3b3feaa06bab0b477ea3f2ec0c28c80221cdde5 Mon Sep 17 00:00:00 2001
From: Dmytro Struk <13853051+dmytrostruk@users.noreply.github.com>
Date: Thu, 16 May 2024 15:17:32 -0700
Subject: [PATCH 3/3] Added integration test
---
.../OpenAI/OpenAICompletionTests.cs | 33 +++++++++++++++++++
1 file changed, 33 insertions(+)
diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs
index 6b07e9b7b7ba..a2285a1c4dd5 100644
--- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs
@@ -9,6 +9,7 @@
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
+using Azure.AI.OpenAI;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Http.Resilience;
@@ -504,6 +505,38 @@ public async Task SemanticKernelVersionHeaderIsSentAsync()
Assert.True(httpHeaderHandler.RequestHeaders.TryGetValues("Semantic-Kernel-Version", out var values));
}
+ [Theory(Skip = "This test is for manual verification.")]
+ [InlineData(null, null)]
+ [InlineData(false, null)]
+ [InlineData(true, 2)]
+ [InlineData(true, 5)]
+ public async Task LogProbsDataIsReturnedWhenRequestedAsync(bool? logprobs, int? topLogprobs)
+ {
+ // Arrange
+ var settings = new OpenAIPromptExecutionSettings { Logprobs = logprobs, TopLogprobs = topLogprobs };
+
+ this._kernelBuilder.Services.AddSingleton(this._logger);
+ var builder = this._kernelBuilder;
+ this.ConfigureAzureOpenAIChatAsText(builder);
+ Kernel target = builder.Build();
+
+ // Act
+ var result = await target.InvokePromptAsync("Hi, can you help me today?", new(settings));
+
+ var logProbabilityInfo = result.Metadata?["LogProbabilityInfo"] as ChatChoiceLogProbabilityInfo;
+
+ // Assert
+ if (logprobs is true)
+ {
+ Assert.NotNull(logProbabilityInfo);
+ Assert.Equal(topLogprobs, logProbabilityInfo.TokenLogProbabilityResults[0].TopLogProbabilityEntries.Count);
+ }
+ else
+ {
+ Assert.Null(logProbabilityInfo);
+ }
+ }
+
#region internals
private readonly XunitLogger _logger = new(output);