From f3da749015843243d3328c731e72124f4850ee1b Mon Sep 17 00:00:00 2001 From: Mark Wallace <127216156+markwallace-microsoft@users.noreply.github.com> Date: Mon, 31 Mar 2025 13:47:42 +0100 Subject: [PATCH 1/3] .Net: First step of adding OpenAI Response Agent (#11266) ### Motivation and Context 1. Basic samples working 2. Conversation state samples working ### Description ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- .../Step01_OpenAIResponseAgent.cs | 90 +++++++++++ .../Step02_ConversationState.cs | 110 +++++++++++++ .../ChatContentMessageExtensions.cs | 19 +++ .../Extensions/ResponseItemExtensions.cs | 57 +++++++ .../src/Agents/OpenAI/OpenAIResponseAgent.cs | 148 ++++++++++++++++++ .../OpenAI/OpenAIResponseAgentThread.cs | 147 +++++++++++++++++ .../Agents/UnitTests/Agents.UnitTests.csproj | 3 +- .../OpenAI/BaseOpenAIResponseClientTest.cs | 41 +++++ .../OpenAI/OpenAIResponseAgentTests.cs | 132 ++++++++++++++++ .../OpenAI/OpenAIResponseAgentThreadTests.cs | 137 ++++++++++++++++ .../AgentUtilities/BaseResponsesAgentTest.cs | 35 +++++ 11 files changed, 918 insertions(+), 1 deletion(-) create mode 100644 dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step01_OpenAIResponseAgent.cs create mode 100644 dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step02_ConversationState.cs create mode 100644 dotnet/src/Agents/OpenAI/Extensions/ResponseItemExtensions.cs create mode 100644 dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs create mode 100644 dotnet/src/Agents/OpenAI/OpenAIResponseAgentThread.cs create mode 100644 dotnet/src/Agents/UnitTests/OpenAI/BaseOpenAIResponseClientTest.cs create mode 100644 dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentTests.cs create mode 100644 dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentThreadTests.cs create mode 100644 dotnet/src/InternalUtilities/samples/AgentUtilities/BaseResponsesAgentTest.cs diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step01_OpenAIResponseAgent.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step01_OpenAIResponseAgent.cs new file mode 100644 index 000000000000..ecfe7895e2a8 --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step01_OpenAIResponseAgent.cs @@ -0,0 +1,90 @@ +// Copyright (c) Microsoft. All rights reserved. +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace GettingStarted.OpenAIResponseAgents; + +/// +/// This example demonstrates using . 
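+/// <remarks>
+/// A minimal sketch of the invocation pattern used throughout this sample (prompt text is illustrative):
+/// <code>
+/// OpenAIResponseAgent agent = new(this.Client) { Name = "ResponseAgent", Instructions = "Answer concisely." };
+/// await foreach (ChatMessageContent response in agent.InvokeAsync("What is the capital of France?"))
+/// {
+///     WriteAgentChatMessage(response);
+/// }
+/// </code>
+/// </remarks>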
+/// +public class Step01_OpenAIResponseAgent(ITestOutputHelper output) : BaseResponsesAgentTest(output) +{ + [Fact] + public async Task UseOpenAIResponseAgentAsync() + { + // Define the agent + OpenAIResponseAgent agent = new(this.Client) + { + Name = "ResponseAgent", + Instructions = "Answer all queries in English and French.", + }; + + // Invoke the agent and output the response + var responseItems = agent.InvokeAsync("What is the capital of France?"); + await foreach (ChatMessageContent responseItem in responseItems) + { + WriteAgentChatMessage(responseItem); + } + } + + [Fact] + public async Task UseOpenAIResponseAgentWithMessagesAsync() + { + // Define the agent + OpenAIResponseAgent agent = new(this.Client) + { + Name = "ResponseAgent", + Instructions = "Answer all queries in English and French." + }; + + ICollection messages = + [ + new ChatMessageContent(AuthorRole.User, "What is the capital of France?"), + new ChatMessageContent(AuthorRole.User, "What is the capital of Ireland?") + ]; + + // Invoke the agent and output the response + var responseItems = agent.InvokeAsync(messages); + await foreach (ChatMessageContent responseItem in responseItems) + { + WriteAgentChatMessage(responseItem); + } + } + + [Fact] + public async Task UseOpenAIResponseAgentWithThreadedConversationAsync() + { + // Define the agent + OpenAIResponseAgent agent = new(this.Client) + { + Name = "ResponseAgent", + Instructions = "Answer all queries in the users preferred language.", + }; + + string[] messages = + [ + "My name is Bob and my preferred language is French.", + "What is the capital of France?", + "What is the capital of Spain?", + "What is the capital of Italy?" + ]; + + // Initial thread can be null as it will be automatically created + AgentThread? agentThread = null; + + // Invoke the agent and output the response + foreach (string message in messages) + { + var responseItems = agent.InvokeAsync(new ChatMessageContent(AuthorRole.User, message), agentThread); + await foreach (AgentResponseItem responseItem in responseItems) + { + // Update the thread so the previous response id is used + agentThread = responseItem.Thread; + + WriteAgentChatMessage(responseItem.Message); + } + } + } +} diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step02_ConversationState.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step02_ConversationState.cs new file mode 100644 index 000000000000..681906ac9bfa --- /dev/null +++ b/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step02_ConversationState.cs @@ -0,0 +1,110 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Microsoft.SemanticKernel.ChatCompletion; + +namespace GettingStarted.OpenAIResponseAgents; + +/// +/// This example demonstrates how to manage conversation state during a model interaction using . +/// OpenAI provides a few ways to manage conversation state, which is important for preserving information across multiple messages or turns in a conversation. 
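+/// <remarks>
+/// A rough sketch of the two modes exercised below (values are illustrative): with store disabled the caller
+/// resends the relevant conversation on every call, while with store enabled the service persists state and
+/// turns are chained through the previous response id carried on the thread.
+/// <code>
+/// // Client-managed state: supply the history explicitly on each invocation.
+/// OpenAIResponseAgent local = new(this.Client) { StoreEnabled = false };
+///
+/// // Service-managed state: reuse the thread returned with each response to continue the conversation.
+/// OpenAIResponseAgent stored = new(this.Client) { StoreEnabled = true };
+/// </code>
+/// </remarks>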
+/// +public class Step02_ConversationState(ITestOutputHelper output) : BaseResponsesAgentTest(output) +{ + [Fact] + public async Task ManuallyConstructPastConversationAsync() + { + // Define the agent + OpenAIResponseAgent agent = new(this.Client) + { + StoreEnabled = false, + }; + + ICollection messages = + [ + new ChatMessageContent(AuthorRole.User, "knock knock."), + new ChatMessageContent(AuthorRole.Assistant, "Who's there?"), + new ChatMessageContent(AuthorRole.User, "Orange.") + ]; + + // Invoke the agent and output the response + var responseItems = agent.InvokeAsync(messages); + await foreach (ChatMessageContent responseItem in responseItems) + { + WriteAgentChatMessage(responseItem); + } + } + + [Fact] + public async Task ManuallyManageConversationStateWithResponsesChatCompletionApiAsync() + { + // Define the agent + OpenAIResponseAgent agent = new(this.Client) + { + StoreEnabled = false, + }; + + string[] messages = + [ + "Tell me a joke?", + "Tell me another?", + ]; + + // Invoke the agent and output the response + AgentThread? agentThread = null; + foreach (string message in messages) + { + var responseItems = agent.InvokeAsync(new ChatMessageContent(AuthorRole.User, message), agentThread); + await foreach (AgentResponseItem responseItem in responseItems) + { + agentThread = responseItem.Thread; + WriteAgentChatMessage(responseItem.Message); + } + } + } + + [Fact] + public async Task ManageConversationStateWithResponseApiAsync() + { + // Define the agent + OpenAIResponseAgent agent = new(this.Client) + { + StoreEnabled = true, + }; + + string[] messages = + [ + "Tell me a joke?", + "Explain why this is funny.", + ]; + + // Invoke the agent and output the response + AgentThread? agentThread = null; + foreach (string message in messages) + { + var responseItems = agent.InvokeAsync(new ChatMessageContent(AuthorRole.User, message), agentThread); + await foreach (AgentResponseItem responseItem in responseItems) + { + agentThread = responseItem.Thread; + WriteAgentChatMessage(responseItem.Message); + } + } + + if (agentThread is not null) + { + var responseAgentThread = agentThread as OpenAIResponseAgentThread; + var threadMessages = responseAgentThread?.GetMessagesAsync(); + if (threadMessages is not null) + { + await foreach (var threadMessage in threadMessages) + { + WriteAgentChatMessage(threadMessage); + } + } + + await agentThread.DeleteAsync(); + } + } +} diff --git a/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs index 5cd0055d8456..3fbeb3d870aa 100644 --- a/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs +++ b/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs @@ -1,8 +1,10 @@ // Copyright (c) Microsoft. All rights reserved. +using System; using System.Collections.Generic; using System.Linq; using Microsoft.SemanticKernel.Agents.OpenAI.Internal; using OpenAI.Assistants; +using OpenAI.Responses; namespace Microsoft.SemanticKernel.Agents.OpenAI; @@ -33,4 +35,21 @@ public static IEnumerable ToThreadInitializationMes { return messages.Select(message => message.ToThreadInitializationMessage()); } + + /// + /// Converts a instance to a . + /// + /// The chat message content to convert. + /// A instance. 
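+    /// <remarks>
+    /// A short sketch of the expected mapping (message text is illustrative): each supported role label selects
+    /// the corresponding ResponseItem factory method, for example CreateUserMessageItem or CreateSystemMessageItem.
+    /// <code>
+    /// ResponseItem userItem = new ChatMessageContent(AuthorRole.User, "Hello").ToResponseItem();
+    /// ResponseItem systemItem = new ChatMessageContent(AuthorRole.System, "Be concise.").ToResponseItem();
+    /// </code>
+    /// </remarks>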
+ public static ResponseItem ToResponseItem(this ChatMessageContent message) + { + return message.Role.Label switch + { + "system" => ResponseItem.CreateSystemMessageItem(message.Content), + "user" => ResponseItem.CreateUserMessageItem(message.Content), + "developer" => ResponseItem.CreateDeveloperMessageItem(message.Content), + "assistant" => ResponseItem.CreateAssistantMessageItem(message.Content), + _ => throw new NotSupportedException($"Unsupported role {message.Role.Label}. Only system, user, developer or assistant roles are allowed."), + }; + } } diff --git a/dotnet/src/Agents/OpenAI/Extensions/ResponseItemExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/ResponseItemExtensions.cs new file mode 100644 index 000000000000..1760863ef7de --- /dev/null +++ b/dotnet/src/Agents/OpenAI/Extensions/ResponseItemExtensions.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Responses; + +namespace Microsoft.SemanticKernel.Agents.OpenAI; + +[ExcludeFromCodeCoverage] +internal static class ResponseItemExtensions +{ + /// + /// Converts a instance to a . + /// + /// The response item to convert. + /// A instance. + public static ChatMessageContent ToChatMessageContent(this ResponseItem item) + { + if (item is MessageResponseItem messageResponseItem) + { + var role = messageResponseItem.Role.ToAuthorRole(); + var collection = messageResponseItem.Content.ToChatMessageContentItemCollection(); + + return new ChatMessageContent(role, collection, innerContent: messageResponseItem); + } + throw new InvalidOperationException(); + } + + #region private + private static ChatMessageContentItemCollection ToChatMessageContentItemCollection(this IList content) + { + var collection = new ChatMessageContentItemCollection(); + foreach (var part in content) + { + if (part.Kind == ResponseContentPartKind.OutputText) + { + collection.Add(new TextContent(part.Text)); + } + } + return collection; + } + + private static AuthorRole ToAuthorRole(this MessageRole messageRole) + { + return messageRole switch + { + MessageRole.Assistant => AuthorRole.Assistant, + MessageRole.Developer => AuthorRole.Developer, + MessageRole.System => AuthorRole.System, + MessageRole.User => AuthorRole.User, + _ => new AuthorRole("unknown"), + }; + } + #endregion +} diff --git a/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs new file mode 100644 index 000000000000..ab2e5a4fe7df --- /dev/null +++ b/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs @@ -0,0 +1,148 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.Agents.Extensions; +using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Responses; + +namespace Microsoft.SemanticKernel.Agents.OpenAI; + +/// +/// Represents a specialization based on Open AI Assistant / GPT. +/// +[ExcludeFromCodeCoverage] +public sealed class OpenAIResponseAgent : KernelAgent +{ + /// + /// Initializes a new instance of the class. + /// + /// The OpenAI provider for accessing the Responses API service. 
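+    /// <remarks>
+    /// One possible construction (model name and API key are placeholders):
+    /// <code>
+    /// OpenAIResponseClient client = new("gpt-4o", new ApiKeyCredential("api-key"), new OpenAIClientOptions());
+    /// OpenAIResponseAgent agent = new(client) { Name = "ResponseAgent", Instructions = "Answer concisely." };
+    /// </code>
+    /// </remarks>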
+ public OpenAIResponseAgent(OpenAIResponseClient client) + { + Verify.NotNull(client); + + this.Client = client; + } + + /// + /// Expose client for additional use. + /// + public OpenAIResponseClient Client { get; } + + /// + /// Storing of messages is enabled. + /// + public bool StoreEnabled { get; init; } = true; + + /// + public override async IAsyncEnumerable> InvokeAsync(ICollection messages, AgentThread? thread = null, AgentInvokeOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + Verify.NotNull(messages); + + var agentThread = await this.EnsureThreadExistsWithMessagesAsync( + messages, + thread, + () => new OpenAIResponseAgentThread(this.Client, this.StoreEnabled), + cancellationToken).ConfigureAwait(false); + + // Invoke responses with the updated chat history. + var chatHistory = new ChatHistory(); + chatHistory.AddRange(messages); + var invokeResults = this.InternalInvokeAsync( + this.Name, + chatHistory, + agentThread, + options, + cancellationToken); + + // Notify the thread of new messages and return them to the caller. + await foreach (var result in invokeResults.ConfigureAwait(false)) + { + await this.NotifyThreadOfNewMessage(agentThread, result, cancellationToken).ConfigureAwait(false); + yield return new(result, agentThread); + } + } + + /// + public override IAsyncEnumerable> InvokeStreamingAsync(ICollection messages, AgentThread? thread = null, AgentInvokeOptions? options = null, CancellationToken cancellationToken = default) + { + throw new NotImplementedException(); + } + + /// + [Experimental("SKEXP0110")] + protected override Task CreateChannelAsync(CancellationToken cancellationToken) + { + throw new NotImplementedException(); + } + + /// + [Experimental("SKEXP0110")] + protected override IEnumerable GetChannelKeys() + { + throw new NotImplementedException(); + } + + /// + [Experimental("SKEXP0110")] + protected override Task RestoreChannelAsync(string channelState, CancellationToken cancellationToken) + { + throw new NotImplementedException(); + } + + #region private + private async IAsyncEnumerable InternalInvokeAsync( + string? agentName, + ChatHistory history, + OpenAIResponseAgentThread agentThread, + AgentInvokeOptions? options, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + var kernel = options?.Kernel ?? this.Kernel; + var arguments = this.MergeArguments(options?.KernelArguments); + + var overrideHistory = history; + if (!this.StoreEnabled) + { + // Use the thread chat history + overrideHistory = [.. agentThread.ChatHistory, .. 
history]; + } + + var inputItems = overrideHistory.Select(c => c.ToResponseItem()); + var creationOptions = new ResponseCreationOptions() + { + EndUserId = this.GetDisplayName(), + Instructions = $"{this.Instructions}\n{options?.AdditionalInstructions}", + StoredOutputEnabled = agentThread.StoreEnabled, + }; + if (agentThread.StoreEnabled && agentThread.Id != null) + { + creationOptions.PreviousResponseId = agentThread.Id; + } + + var clientResult = await this.Client.CreateResponseAsync(inputItems, creationOptions, cancellationToken).ConfigureAwait(false); + var response = clientResult.Value; + + if (this.StoreEnabled) + { + // Update the response id + agentThread.ResponseId = response.Id; + } + + var messages = response.OutputItems.Select(o => o.ToChatMessageContent()); + + foreach (ChatMessageContent message in messages) + { + message.AuthorName = this.Name; + + yield return message; + } + } + #endregion +} diff --git a/dotnet/src/Agents/OpenAI/OpenAIResponseAgentThread.cs b/dotnet/src/Agents/OpenAI/OpenAIResponseAgentThread.cs new file mode 100644 index 000000000000..b8688e1e7f43 --- /dev/null +++ b/dotnet/src/Agents/OpenAI/OpenAIResponseAgentThread.cs @@ -0,0 +1,147 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.SemanticKernel.ChatCompletion; +using OpenAI.Responses; + +namespace Microsoft.SemanticKernel.Agents.OpenAI; + +/// +/// Represents a conversation thread for an OpenAI responses-based agent. +/// +[ExcludeFromCodeCoverage] +public sealed class OpenAIResponseAgentThread : AgentThread +{ + private readonly OpenAIResponseClient _client; + private readonly ChatHistory _chatHistory = new(); + private bool _isDeleted = false; + + /// + /// Initializes a new instance of the class. + /// + /// The agents client to use for interacting with responses. + /// Enable storing messages on the server. + public OpenAIResponseAgentThread(OpenAIResponseClient client, bool enableStore = false) + { + Verify.NotNull(client); + + this._client = client; + this.StoreEnabled = enableStore; + } + + /// + /// Initializes a new instance of the class that resumes an existing response. + /// + /// The agents client to use for interacting with responses. + /// The ID of an existing response to resume. + /// Enable storing messages on the server. + public OpenAIResponseAgentThread(OpenAIResponseClient client, string id, bool enableStore = false) + { + Verify.NotNull(client); + Verify.NotNull(id); + + this._client = client; + this.ResponseId = id; + this.StoreEnabled = enableStore; + } + + /// + /// Storing of messages is enabled. + /// + public bool StoreEnabled { get; private set; } = false; + + /// + /// The current response id. + /// + internal string? ResponseId { get; set; } + + /// + /// The current chat history. + /// + internal ChatHistory ChatHistory => this._chatHistory; + + /// + public override string? 
Id => this.ResponseId; + + /// + protected override Task CreateInternalAsync(CancellationToken cancellationToken = default) + { + if (this._isDeleted) + { + throw new InvalidOperationException("This thread has been deleted and cannot be recreated."); + } + + // Enable storing + this.StoreEnabled = true; + + // Id will not be available until after a message is sent + return Task.FromResult(null); + } + + /// + protected override Task DeleteInternalAsync(CancellationToken cancellationToken = default) + { + if (this._isDeleted) + { + return Task.CompletedTask; + } + + if (this.ResponseId is null) + { + throw new InvalidOperationException("This thread cannot be deleted, since it has not been created."); + } + + this._chatHistory.Clear(); + this._isDeleted = true; + + return Task.CompletedTask; + } + + /// + protected override Task OnNewMessageInternalAsync(ChatMessageContent newMessage, CancellationToken cancellationToken = default) + { + if (this._isDeleted) + { + throw new InvalidOperationException("This thread has been deleted and cannot be used anymore."); + } + + // Keep track of locally + if (string.IsNullOrEmpty(this.ResponseId)) + { + this._chatHistory.Add(newMessage); + } + + return Task.CompletedTask; + } + + /// + public async IAsyncEnumerable GetMessagesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default) + { + if (this._isDeleted) + { + throw new InvalidOperationException("This thread has been deleted and cannot be used anymore."); + } + + if (this.StoreEnabled && !string.IsNullOrEmpty(this.ResponseId)) + { + var options = new ResponseItemCollectionOptions(); + var collectionResult = this._client.GetResponseInputItemsAsync(this.ResponseId, options, cancellationToken).ConfigureAwait(false); + await foreach (var responseItem in collectionResult) + { + yield return responseItem.ToChatMessageContent(); + } + } + else + { + foreach (var message in this._chatHistory) + { + yield return message; + } + } + } +} diff --git a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj index 752bd3c1ebcb..4e9dffef8934 100644 --- a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj +++ b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj @@ -46,7 +46,8 @@ - + + \ No newline at end of file diff --git a/dotnet/src/Agents/UnitTests/OpenAI/BaseOpenAIResponseClientTest.cs b/dotnet/src/Agents/UnitTests/OpenAI/BaseOpenAIResponseClientTest.cs new file mode 100644 index 000000000000..c2462ea1128f --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/BaseOpenAIResponseClientTest.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System; +using System.ClientModel; +using System.ClientModel.Primitives; +using System.Net.Http; +using OpenAI; +using OpenAI.Responses; + +namespace SemanticKernel.Agents.UnitTests.OpenAI; + +/// +/// Base tests which use +/// +public class BaseOpenAIResponseClientTest : IDisposable +{ + internal MultipleHttpMessageHandlerStub MessageHandlerStub { get; } + internal HttpClient HttpClient { get; } + internal OpenAIResponseClient Client { get; } + + internal BaseOpenAIResponseClientTest() + { + this.MessageHandlerStub = new MultipleHttpMessageHandlerStub(); + this.HttpClient = new HttpClient(this.MessageHandlerStub, disposeHandler: false); + + var clientOptions = new OpenAIClientOptions() + { + Transport = new HttpClientPipelineTransport(this.HttpClient) + }; + this.Client = new OpenAIResponseClient("model", new ApiKeyCredential("apiKey"), clientOptions); + } + + /// + public void Dispose() + { + this.MessageHandlerStub.Dispose(); + this.HttpClient.Dispose(); + + GC.SuppressFinalize(this); + } +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentTests.cs new file mode 100644 index 000000000000..86f8ff4f486b --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentTests.cs @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System; +using System.Linq; +using System.Net.Http; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI; + +/// +/// Tests for the class. +/// +public sealed class OpenAIResponseAgentTests : BaseOpenAIResponseClientTest +{ + /// + /// Tests that the constructor verifies parameters and throws when necessary. + /// + [Fact] + public void ConstructorShouldVerifyParams() + { + // Arrange & Act & Assert + Assert.Throws(() => new OpenAIResponseAgent(null!)); + } + + /// + /// Tests that the OpenAIResponseAgent.InvokeAsync verifies parameters and throws when necessary. + /// + [Fact] + public void InvokeShouldVerifyParams() + { + // Arrange + var agent = new OpenAIResponseAgent(this.Client); + string nullString = null!; + ChatMessageContent nullMessage = null!; + + // Act & Assert + Assert.Throws(() => agent.InvokeAsync(nullString)); + Assert.Throws(() => agent.InvokeAsync(nullMessage)); + } + + /// + /// Tests that the OpenAIResponseAgent.InvokeAsync verifies parameters and throws when necessary. 
+ /// + [Fact] + public async Task VerifyInvokeAsync() + { + // Arrange + this.MessageHandlerStub.ResponsesToReturn.Add( + new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StringContent(InvokeResponse) } + ); + var agent = new OpenAIResponseAgent(this.Client) + { + Name = "ResponseAgent", + Instructions = "Answer all queries in English and French.", + }; + + // Act + var responseItems = agent.InvokeAsync("What is the capital of France?"); + + // Assert + Assert.NotNull(responseItems); + var items = await responseItems!.ToListAsync>(); + Assert.Single(items); + Assert.Equal("The capital of France is Paris.\n\nLa capitale de la France est Paris.", items[0].Message.Content); + } + + #region private + private const string InvokeResponse = + """ + { + "id": "resp_67e8f5cf761c8191aab763d1e901e3410bbdc4b8da506cd2", + "object": "response", + "created_at": 1743320527, + "status": "completed", + "error": null, + "incomplete_details": null, + "instructions": "Answer all queries in English and French.", + "max_output_tokens": null, + "model": "gpt-4o-2024-08-06", + "output": [ + { + "type": "message", + "id": "msg_67e8f5cfbe688191a428ed9869c39fea0bbdc4b8da506cd2", + "status": "completed", + "role": "assistant", + "content": [ + { + "type": "output_text", + "text": "The capital of France is Paris.\n\nLa capitale de la France est Paris.", + "annotations": [] + } + ] + } + ], + "parallel_tool_calls": true, + "previous_response_id": null, + "reasoning": { + "effort": null, + "generate_summary": null + }, + "store": true, + "temperature": 1.0, + "text": { + "format": { + "type": "text" + } + }, + "tool_choice": "auto", + "tools": [], + "top_p": 1.0, + "truncation": "disabled", + "usage": { + "input_tokens": 26, + "input_tokens_details": { + "cached_tokens": 0 + }, + "output_tokens": 16, + "output_tokens_details": { + "reasoning_tokens": 0 + }, + "total_tokens": 42 + }, + "user": "ResponseAgent", + "metadata": {} + } + """; + #endregion +} diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentThreadTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentThreadTests.cs new file mode 100644 index 000000000000..3fd424351584 --- /dev/null +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentThreadTests.cs @@ -0,0 +1,137 @@ +// Copyright (c) Microsoft. All rights reserved. +using System; +using System.Linq; +using System.Net.Http; +using System.Threading.Tasks; +using Microsoft.SemanticKernel; +using Microsoft.SemanticKernel.Agents.OpenAI; +using Xunit; + +namespace SemanticKernel.Agents.UnitTests.OpenAI; + +/// +/// Tests for the class. +/// +public sealed class OpenAIResponseAgentThreadTests : BaseOpenAIResponseClientTest +{ + /// + /// Tests that the constructor verifies parameters and throws when necessary. + /// + [Fact] + public void ConstructorShouldVerifyParams() + { + // Arrange & Act & Assert + Assert.Throws(() => new OpenAIResponseAgentThread(null!)); + Assert.Throws(() => new OpenAIResponseAgentThread(null!, "threadId")); + Assert.Throws(() => new OpenAIResponseAgentThread(this.Client, id: null!)); + + var agentThread = new OpenAIResponseAgentThread(this.Client); + Assert.NotNull(agentThread); + } + + /// + /// Tests that the constructor for resuming a thread uses the provided parameters. 
+ /// + [Fact] + public void ConstructorForResumingThreadShouldUseParams() + { + // Arrange & Act + var agentThread = new OpenAIResponseAgentThread(this.Client, "threadId"); + + // Assert + Assert.NotNull(agentThread); + Assert.Equal("threadId", agentThread.Id); + } + + /// + /// Verify returned when store is disabled. + /// + [Fact] + public async Task VerifyGetMessagesWhenStoreDisabledAsync() + { + // Arrange + var thread = new OpenAIResponseAgentThread(this.Client); + + // Act + var messages = thread.GetMessagesAsync(); + + // Assert + Assert.NotNull(messages); + var messagesList = await messages!.ToListAsync(); + Assert.Empty(messagesList); + } + + /// + /// Verify returned when store is disabled. + /// + [Fact] + public async Task VerifyGetMessagesWhenStoreEnabledAsync() + { + // Arrange + this.MessageHandlerStub.ResponsesToReturn.Add( + new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StringContent(MessagesResponse) } + ); + var responseId = "resp_67e8ff743ea08191b085bea42b4d83e809a3a922c4f4221b"; + var thread = new OpenAIResponseAgentThread(this.Client, id: responseId, enableStore: true); + + // Act + var messages = thread.GetMessagesAsync(); + + // Assert + Assert.NotNull(messages); + var messagesList = await messages!.ToListAsync(); + Assert.Equal(3, messagesList.Count); + } + + #region private + private const string MessagesResponse = + """ + { + "object": "list", + "data": [ + { + "type": "message", + "id": "msg_67e8ff7445408191af5d6f4a87a9d3fe09a3a922c4f4221b", + "status": "completed", + "role": "user", + "content": [ + { + "type": "input_text", + "text": "Explain why this is funny." + } + ] + }, + { + "type": "message", + "id": "msg_67e8ff73be188191b871e41c2816355209a3a922c4f4221b", + "status": "completed", + "role": "assistant", + "content": [ + { + "type": "output_text", + "text": "Why don't skeletons fight each other?\n\nThey don't have the guts!", + "annotations": [] + } + ] + }, + { + "type": "message", + "id": "msg_67e8ff7258a081919e7964ac7b344bc909a3a922c4f4221b", + "status": "completed", + "role": "user", + "content": [ + { + "type": "input_text", + "text": "Tell me a joke?" + } + ] + } + ], + "first_id": "msg_67e8ff7445408191af5d6f4a87a9d3fe09a3a922c4f4221b", + "last_id": "msg_67e8ff7258a081919e7964ac7b344bc909a3a922c4f4221b", + "has_more": false + } + + """; + #endregion +} diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseResponsesAgentTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseResponsesAgentTest.cs new file mode 100644 index 000000000000..3ce762447ca0 --- /dev/null +++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseResponsesAgentTest.cs @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.ClientModel; +using System.ClientModel.Primitives; +using Microsoft.SemanticKernel.Agents.OpenAI; +using OpenAI; +using OpenAI.Responses; + +/// +/// Base class for samples that demonstrate the usage of . 
+/// +public abstract class BaseResponsesAgentTest : BaseAgentsTest +{ + protected BaseResponsesAgentTest(ITestOutputHelper output) : base(output) + { + var options = new OpenAIClientOptions(); + if (this.EnableLogging) + { + options.MessageLoggingPolicy = new MessageLoggingPolicy(new() + { + EnableLogging = true, + EnableMessageLogging = true, + EnableMessageContentLogging = true, + LoggerFactory = this.LoggerFactory + }); + } + + this.Client = new(model: "gpt-4o", credential: new ApiKeyCredential(TestConfiguration.OpenAI.ApiKey), options: options); + } + + protected bool EnableLogging { get; set; } = true; + + /// + protected override OpenAIResponseClient Client { get; } +} From acdfcb2c4506033891a4b59ff0285f84e0b9f740 Mon Sep 17 00:00:00 2001 From: Mark Wallace <127216156+markwallace-microsoft@users.noreply.github.com> Date: Tue, 8 Apr 2025 14:39:56 +0100 Subject: [PATCH 2/3] .Net: Fixes required after latest update from main (#11431) ### Motivation and Context 1. `MergeArguments` was made internal so I have made a copy for now but I need to change this code so it can be shared in the main branch. 2. Also did some clean-up of the getting started samples. ### Description ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- .../OpenAIResponse/Step02_ConversationState.cs | 17 +++++++++++++++-- .../OpenAI/Extensions/ResponseItemExtensions.cs | 16 ++++++++++++++-- dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs | 9 ++++----- .../Agents/OpenAI/OpenAIResponseAgentThread.cs | 3 --- .../AgentUtilities/BaseResponsesAgentTest.cs | 2 +- 5 files changed, 34 insertions(+), 13 deletions(-) diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step02_ConversationState.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step02_ConversationState.cs index 681906ac9bfa..35d6c367ec97 100644 --- a/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step02_ConversationState.cs +++ b/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step02_ConversationState.cs @@ -28,6 +28,10 @@ public async Task ManuallyConstructPastConversationAsync() new ChatMessageContent(AuthorRole.Assistant, "Who's there?"), new ChatMessageContent(AuthorRole.User, "Orange.") ]; + foreach (ChatMessageContent message in messages) + { + WriteAgentChatMessage(message); + } // Invoke the agent and output the response var responseItems = agent.InvokeAsync(messages); @@ -56,7 +60,10 @@ public async Task ManuallyManageConversationStateWithResponsesChatCompletionApiA AgentThread? agentThread = null; foreach (string message in messages) { - var responseItems = agent.InvokeAsync(new ChatMessageContent(AuthorRole.User, message), agentThread); + var userMessage = new ChatMessageContent(AuthorRole.User, message); + WriteAgentChatMessage(userMessage); + + var responseItems = agent.InvokeAsync(userMessage, agentThread); await foreach (AgentResponseItem responseItem in responseItems) { agentThread = responseItem.Thread; @@ -84,16 +91,22 @@ public async Task ManageConversationStateWithResponseApiAsync() AgentThread? 
agentThread = null; foreach (string message in messages) { - var responseItems = agent.InvokeAsync(new ChatMessageContent(AuthorRole.User, message), agentThread); + var userMessage = new ChatMessageContent(AuthorRole.User, message); + WriteAgentChatMessage(userMessage); + + var responseItems = agent.InvokeAsync(userMessage, agentThread); await foreach (AgentResponseItem responseItem in responseItems) { agentThread = responseItem.Thread; + this.Output.WriteLine(agentThread.Id); WriteAgentChatMessage(responseItem.Message); } } + // Display the contents in the latest thread if (agentThread is not null) { + this.Output.WriteLine("\n\nResponse Thread Messages\n"); var responseAgentThread = agentThread as OpenAIResponseAgentThread; var threadMessages = responseAgentThread?.GetMessagesAsync(); if (threadMessages is not null) diff --git a/dotnet/src/Agents/OpenAI/Extensions/ResponseItemExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/ResponseItemExtensions.cs index 1760863ef7de..07290327c1c3 100644 --- a/dotnet/src/Agents/OpenAI/Extensions/ResponseItemExtensions.cs +++ b/dotnet/src/Agents/OpenAI/Extensions/ResponseItemExtensions.cs @@ -34,9 +34,21 @@ private static ChatMessageContentItemCollection ToChatMessageContentItemCollecti var collection = new ChatMessageContentItemCollection(); foreach (var part in content) { - if (part.Kind == ResponseContentPartKind.OutputText) + if (part.Kind == ResponseContentPartKind.OutputText || part.Kind == ResponseContentPartKind.InputText) { - collection.Add(new TextContent(part.Text)); + collection.Add(new TextContent(part.Text, innerContent: part)); + } + else if (part.Kind == ResponseContentPartKind.InputImage) + { + collection.Add(new FileReferenceContent(part.InputImageFileId, innerContent: part)); + } + else if (part.Kind == ResponseContentPartKind.InputFile) + { + collection.Add(new FileReferenceContent(part.InputFileId, innerContent: part)); + } + else if (part.Kind == ResponseContentPartKind.Refusal) + { + collection.Add(new TextContent(part.Refusal, innerContent: part)); } } return collection; diff --git a/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs index ab2e5a4fe7df..7212f9c8b22e 100644 --- a/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs +++ b/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs @@ -14,10 +14,10 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI; /// -/// Represents a specialization based on Open AI Assistant / GPT. +/// Represents a specialization based on Open AI Assistant / GPT. /// [ExcludeFromCodeCoverage] -public sealed class OpenAIResponseAgent : KernelAgent +public sealed class OpenAIResponseAgent : Agent { /// /// Initializes a new instance of the class. @@ -105,7 +105,6 @@ private async IAsyncEnumerable InternalInvokeAsync( [EnumeratorCancellation] CancellationToken cancellationToken) { var kernel = options?.Kernel ?? 
this.Kernel; - var arguments = this.MergeArguments(options?.KernelArguments); var overrideHistory = history; if (!this.StoreEnabled) @@ -121,9 +120,9 @@ private async IAsyncEnumerable InternalInvokeAsync( Instructions = $"{this.Instructions}\n{options?.AdditionalInstructions}", StoredOutputEnabled = agentThread.StoreEnabled, }; - if (agentThread.StoreEnabled && agentThread.Id != null) + if (agentThread.StoreEnabled && agentThread.ResponseId != null) { - creationOptions.PreviousResponseId = agentThread.Id; + creationOptions.PreviousResponseId = agentThread.ResponseId; } var clientResult = await this.Client.CreateResponseAsync(inputItems, creationOptions, cancellationToken).ConfigureAwait(false); diff --git a/dotnet/src/Agents/OpenAI/OpenAIResponseAgentThread.cs b/dotnet/src/Agents/OpenAI/OpenAIResponseAgentThread.cs index b8688e1e7f43..feef7428e608 100644 --- a/dotnet/src/Agents/OpenAI/OpenAIResponseAgentThread.cs +++ b/dotnet/src/Agents/OpenAI/OpenAIResponseAgentThread.cs @@ -76,9 +76,6 @@ public OpenAIResponseAgentThread(OpenAIResponseClient client, string id, bool en throw new InvalidOperationException("This thread has been deleted and cannot be recreated."); } - // Enable storing - this.StoreEnabled = true; - // Id will not be available until after a message is sent return Task.FromResult(null); } diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseResponsesAgentTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseResponsesAgentTest.cs index 3ce762447ca0..cb9c98d7c05e 100644 --- a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseResponsesAgentTest.cs +++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseResponsesAgentTest.cs @@ -28,7 +28,7 @@ protected BaseResponsesAgentTest(ITestOutputHelper output) : base(output) this.Client = new(model: "gpt-4o", credential: new ApiKeyCredential(TestConfiguration.OpenAI.ApiKey), options: options); } - protected bool EnableLogging { get; set; } = true; + protected bool EnableLogging { get; set; } = false; /// protected override OpenAIResponseClient Client { get; } From 1c8aa6ef7762b2a592a284cab7254debc1c46a49 Mon Sep 17 00:00:00 2001 From: Mark Wallace <127216156+markwallace-microsoft@users.noreply.github.com> Date: Mon, 14 Apr 2025 19:16:21 +0100 Subject: [PATCH 3/3] .Net: Use ChatHistoryAgentThread with OpenAIResponseAgent (#11499) ### Motivation and Context Response to this suggestion: _Instead of having a StoreEnabled on the agentThread, consider supporting two thread types, e.g. 
ChatHistoryAgentThread and OpenAIResponseAgentThread._ ### Description ### Contribution Checklist - [ ] The code builds clean without any errors or warnings - [ ] The PR follows the [SK Contribution Guidelines](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md) and the [pre-submission formatting script](https://github.com/microsoft/semantic-kernel/blob/main/CONTRIBUTING.md#development-scripts) raises no violations - [ ] All unit tests pass, and I have added new tests where possible - [ ] I didn't break anyone :smile: --- .../Step02_ConversationState.cs | 1 - dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj | 1 + .../src/Agents/OpenAI/OpenAIResponseAgent.cs | 52 ++++++++++++++----- .../OpenAI/OpenAIResponseAgentThread.cs | 46 ++++------------ .../OpenAI/OpenAIResponseAgentThreadTests.cs | 6 +-- 5 files changed, 52 insertions(+), 54 deletions(-) diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step02_ConversationState.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step02_ConversationState.cs index 35d6c367ec97..15bb1134403d 100644 --- a/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step02_ConversationState.cs +++ b/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step02_ConversationState.cs @@ -98,7 +98,6 @@ public async Task ManageConversationStateWithResponseApiAsync() await foreach (AgentResponseItem responseItem in responseItems) { agentThread = responseItem.Thread; - this.Output.WriteLine(agentThread.Id); WriteAgentChatMessage(responseItem.Message); } } diff --git a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj index 4a29c6e5de28..d0587839eaea 100644 --- a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj +++ b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj @@ -35,6 +35,7 @@ + diff --git a/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs index 7212f9c8b22e..5c977ff8ccc9 100644 --- a/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs +++ b/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs @@ -14,7 +14,7 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI; /// -/// Represents a specialization based on Open AI Assistant / GPT. +/// Represents a specialization based on OpenAI Response API. /// [ExcludeFromCodeCoverage] public sealed class OpenAIResponseAgent : Agent @@ -45,11 +45,7 @@ public override async IAsyncEnumerable> In { Verify.NotNull(messages); - var agentThread = await this.EnsureThreadExistsWithMessagesAsync( - messages, - thread, - () => new OpenAIResponseAgentThread(this.Client, this.StoreEnabled), - cancellationToken).ConfigureAwait(false); + var agentThread = await this.EnsureThreadExistsWithMessagesAsync(messages, thread, cancellationToken).ConfigureAwait(false); // Invoke responses with the updated chat history. var chatHistory = new ChatHistory(); @@ -97,10 +93,20 @@ protected override Task RestoreChannelAsync(string channelState, C } #region private + private async Task EnsureThreadExistsWithMessagesAsync(ICollection messages, AgentThread? thread, CancellationToken cancellationToken) + { + if (this.StoreEnabled) + { + return await this.EnsureThreadExistsWithMessagesAsync(messages, thread, () => new OpenAIResponseAgentThread(this.Client), cancellationToken).ConfigureAwait(false); + } + + return await this.EnsureThreadExistsWithMessagesAsync(messages, thread, () => new ChatHistoryAgentThread(), cancellationToken).ConfigureAwait(false); + } + private async IAsyncEnumerable InternalInvokeAsync( string? 
agentName, ChatHistory history, - OpenAIResponseAgentThread agentThread, + AgentThread agentThread, AgentInvokeOptions? options, [EnumeratorCancellation] CancellationToken cancellationToken) { @@ -110,7 +116,7 @@ private async IAsyncEnumerable InternalInvokeAsync( if (!this.StoreEnabled) { // Use the thread chat history - overrideHistory = [.. agentThread.ChatHistory, .. history]; + overrideHistory = [.. this.GetChatHistory(agentThread), .. history]; } var inputItems = overrideHistory.Select(c => c.ToResponseItem()); @@ -118,11 +124,11 @@ private async IAsyncEnumerable InternalInvokeAsync( { EndUserId = this.GetDisplayName(), Instructions = $"{this.Instructions}\n{options?.AdditionalInstructions}", - StoredOutputEnabled = agentThread.StoreEnabled, + StoredOutputEnabled = this.StoreEnabled, }; - if (agentThread.StoreEnabled && agentThread.ResponseId != null) + if (this.StoreEnabled && agentThread.Id != null) { - creationOptions.PreviousResponseId = agentThread.ResponseId; + creationOptions.PreviousResponseId = agentThread.Id; } var clientResult = await this.Client.CreateResponseAsync(inputItems, creationOptions, cancellationToken).ConfigureAwait(false); @@ -130,8 +136,7 @@ private async IAsyncEnumerable InternalInvokeAsync( if (this.StoreEnabled) { - // Update the response id - agentThread.ResponseId = response.Id; + this.UpdateResponseId(agentThread, response.Id); } var messages = response.OutputItems.Select(o => o.ToChatMessageContent()); @@ -143,5 +148,26 @@ private async IAsyncEnumerable InternalInvokeAsync( yield return message; } } + + private ChatHistory GetChatHistory(AgentThread agentThread) + { + if (agentThread is ChatHistoryAgentThread chatHistoryAgentThread) + { + return chatHistoryAgentThread.ChatHistory; + } + + throw new InvalidOperationException("The agent thread is not a ChatHistoryAgentThread."); + } + + private void UpdateResponseId(AgentThread agentThread, string id) + { + if (agentThread is OpenAIResponseAgentThread openAIResponseAgentThread) + { + openAIResponseAgentThread.ResponseId = id; + return; + } + + throw new InvalidOperationException("The agent thread is not an OpenAIResponseAgentThread."); + } #endregion } diff --git a/dotnet/src/Agents/OpenAI/OpenAIResponseAgentThread.cs b/dotnet/src/Agents/OpenAI/OpenAIResponseAgentThread.cs index feef7428e608..28445d8a069b 100644 --- a/dotnet/src/Agents/OpenAI/OpenAIResponseAgentThread.cs +++ b/dotnet/src/Agents/OpenAI/OpenAIResponseAgentThread.cs @@ -6,65 +6,49 @@ using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; -using Microsoft.SemanticKernel.ChatCompletion; using OpenAI.Responses; namespace Microsoft.SemanticKernel.Agents.OpenAI; /// -/// Represents a conversation thread for an OpenAI responses-based agent. +/// Represents a conversation thread for an OpenAI Response API based agent when store is enabled. /// [ExcludeFromCodeCoverage] public sealed class OpenAIResponseAgentThread : AgentThread { private readonly OpenAIResponseClient _client; - private readonly ChatHistory _chatHistory = new(); private bool _isDeleted = false; /// /// Initializes a new instance of the class. /// /// The agents client to use for interacting with responses. - /// Enable storing messages on the server. 
- public OpenAIResponseAgentThread(OpenAIResponseClient client, bool enableStore = false) + public OpenAIResponseAgentThread(OpenAIResponseClient client) { Verify.NotNull(client); this._client = client; - this.StoreEnabled = enableStore; } /// /// Initializes a new instance of the class that resumes an existing response. /// /// The agents client to use for interacting with responses. - /// The ID of an existing response to resume. - /// Enable storing messages on the server. - public OpenAIResponseAgentThread(OpenAIResponseClient client, string id, bool enableStore = false) + /// The ID of an existing response to resume. + public OpenAIResponseAgentThread(OpenAIResponseClient client, string responseId) { Verify.NotNull(client); - Verify.NotNull(id); + Verify.NotNull(responseId); this._client = client; - this.ResponseId = id; - this.StoreEnabled = enableStore; + this.ResponseId = responseId; } - /// - /// Storing of messages is enabled. - /// - public bool StoreEnabled { get; private set; } = false; - /// /// The current response id. /// internal string? ResponseId { get; set; } - /// - /// The current chat history. - /// - internal ChatHistory ChatHistory => this._chatHistory; - /// public override string? Id => this.ResponseId; @@ -93,7 +77,6 @@ protected override Task DeleteInternalAsync(CancellationToken cancellationToken throw new InvalidOperationException("This thread cannot be deleted, since it has not been created."); } - this._chatHistory.Clear(); this._isDeleted = true; return Task.CompletedTask; @@ -107,12 +90,6 @@ protected override Task OnNewMessageInternalAsync(ChatMessageContent newMessage, throw new InvalidOperationException("This thread has been deleted and cannot be used anymore."); } - // Keep track of locally - if (string.IsNullOrEmpty(this.ResponseId)) - { - this._chatHistory.Add(newMessage); - } - return Task.CompletedTask; } @@ -124,7 +101,7 @@ public async IAsyncEnumerable GetMessagesAsync([EnumeratorCa throw new InvalidOperationException("This thread has been deleted and cannot be used anymore."); } - if (this.StoreEnabled && !string.IsNullOrEmpty(this.ResponseId)) + if (!string.IsNullOrEmpty(this.ResponseId)) { var options = new ResponseItemCollectionOptions(); var collectionResult = this._client.GetResponseInputItemsAsync(this.ResponseId, options, cancellationToken).ConfigureAwait(false); @@ -133,12 +110,7 @@ public async IAsyncEnumerable GetMessagesAsync([EnumeratorCa yield return responseItem.ToChatMessageContent(); } } - else - { - foreach (var message in this._chatHistory) - { - yield return message; - } - } + + yield break; } } diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentThreadTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentThreadTests.cs index 3fd424351584..3c0eb684bd51 100644 --- a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentThreadTests.cs +++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentThreadTests.cs @@ -23,7 +23,7 @@ public void ConstructorShouldVerifyParams() // Arrange & Act & Assert Assert.Throws(() => new OpenAIResponseAgentThread(null!)); Assert.Throws(() => new OpenAIResponseAgentThread(null!, "threadId")); - Assert.Throws(() => new OpenAIResponseAgentThread(this.Client, id: null!)); + Assert.Throws(() => new OpenAIResponseAgentThread(this.Client, responseId: null!)); var agentThread = new OpenAIResponseAgentThread(this.Client); Assert.NotNull(agentThread); @@ -47,7 +47,7 @@ public void ConstructorForResumingThreadShouldUseParams() /// Verify returned when store is disabled. 
/// 
     [Fact]
-    public async Task VerifyGetMessagesWhenStoreDisabledAsync()
+    public async Task VerifyGetMessagesWhenThreadIsUnusedAsync()
     {
         // Arrange
         var thread = new OpenAIResponseAgentThread(this.Client);
@@ -72,7 +72,7 @@ public async Task VerifyGetMessagesWhenStoreEnabledAsync()
             new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StringContent(MessagesResponse) }
         );
         var responseId = "resp_67e8ff743ea08191b085bea42b4d83e809a3a922c4f4221b";
-        var thread = new OpenAIResponseAgentThread(this.Client, id: responseId, enableStore: true);
+        var thread = new OpenAIResponseAgentThread(this.Client, responseId: responseId);

         // Act
         var messages = thread.GetMessagesAsync();
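// A hedged end-to-end sketch of the usage that results from this change set: StoreEnabled now lives on the
// agent rather than the thread, and OpenAIResponseAgentThread resumes a stored conversation by response id.
// The model name, API key, and prompt below are illustrative placeholders.
using System.ClientModel;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
using OpenAI;
using OpenAI.Responses;

public static class ResponseAgentUsageSketch
{
    public static async Task RunAsync()
    {
        OpenAIResponseClient client = new("gpt-4o", new ApiKeyCredential("api-key"), new OpenAIClientOptions());

        // Service-managed conversation state: the service stores each response and the thread id
        // carries the previous response id between turns.
        OpenAIResponseAgent agent = new(client) { Name = "ResponseAgent", StoreEnabled = true };

        AgentThread? thread = null;
        await foreach (AgentResponseItem<ChatMessageContent> item in agent.InvokeAsync(
            new ChatMessageContent(AuthorRole.User, "Tell me a joke?"), thread))
        {
            thread = item.Thread;
        }

        // Resume the stored conversation later from its response id and enumerate the prior turns.
        var resumed = new OpenAIResponseAgentThread(client, responseId: thread!.Id!);
        await foreach (ChatMessageContent message in resumed.GetMessagesAsync())
        {
            // Display or inspect each prior message.
        }
    }
}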