diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step01_OpenAIResponseAgent.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step01_OpenAIResponseAgent.cs
new file mode 100644
index 000000000000..ecfe7895e2a8
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step01_OpenAIResponseAgent.cs
@@ -0,0 +1,90 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+
+namespace GettingStarted.OpenAIResponseAgents;
+
+/// <summary>
+/// This example demonstrates using <see cref="OpenAIResponseAgent"/>.
+/// </summary>
+public class Step01_OpenAIResponseAgent(ITestOutputHelper output) : BaseResponsesAgentTest(output)
+{
+ [Fact]
+ public async Task UseOpenAIResponseAgentAsync()
+ {
+ // Define the agent
+ OpenAIResponseAgent agent = new(this.Client)
+ {
+ Name = "ResponseAgent",
+ Instructions = "Answer all queries in English and French.",
+ };
+
+ // Invoke the agent and output the response
+ var responseItems = agent.InvokeAsync("What is the capital of France?");
+ await foreach (ChatMessageContent responseItem in responseItems)
+ {
+ WriteAgentChatMessage(responseItem);
+ }
+ }
+
+ [Fact]
+ public async Task UseOpenAIResponseAgentWithMessagesAsync()
+ {
+ // Define the agent
+ OpenAIResponseAgent agent = new(this.Client)
+ {
+ Name = "ResponseAgent",
+ Instructions = "Answer all queries in English and French."
+ };
+
+ ICollection<ChatMessageContent> messages =
+ [
+ new ChatMessageContent(AuthorRole.User, "What is the capital of France?"),
+ new ChatMessageContent(AuthorRole.User, "What is the capital of Ireland?")
+ ];
+
+ // Invoke the agent and output the response
+ var responseItems = agent.InvokeAsync(messages);
+ await foreach (ChatMessageContent responseItem in responseItems)
+ {
+ WriteAgentChatMessage(responseItem);
+ }
+ }
+
+ [Fact]
+ public async Task UseOpenAIResponseAgentWithThreadedConversationAsync()
+ {
+ // Define the agent
+ OpenAIResponseAgent agent = new(this.Client)
+ {
+ Name = "ResponseAgent",
+ Instructions = "Answer all queries in the user's preferred language.",
+ };
+
+ string[] messages =
+ [
+ "My name is Bob and my preferred language is French.",
+ "What is the capital of France?",
+ "What is the capital of Spain?",
+ "What is the capital of Italy?"
+ ];
+
+ // Initial thread can be null as it will be automatically created
+ AgentThread? agentThread = null;
+
+ // Invoke the agent and output the response
+ foreach (string message in messages)
+ {
+ var responseItems = agent.InvokeAsync(new ChatMessageContent(AuthorRole.User, message), agentThread);
+ await foreach (AgentResponseItem<ChatMessageContent> responseItem in responseItems)
+ {
+ // Update the thread so the previous response id is used
+ agentThread = responseItem.Thread;
+
+ WriteAgentChatMessage(responseItem.Message);
+ }
+ }
+ }
+}
diff --git a/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step02_ConversationState.cs b/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step02_ConversationState.cs
new file mode 100644
index 000000000000..15bb1134403d
--- /dev/null
+++ b/dotnet/samples/GettingStartedWithAgents/OpenAIResponse/Step02_ConversationState.cs
@@ -0,0 +1,122 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+
+namespace GettingStarted.OpenAIResponseAgents;
+
+/// <summary>
+/// This example demonstrates how to manage conversation state during a model interaction using <see cref="OpenAIResponseAgent"/>.
+/// OpenAI provides a few ways to manage conversation state, which is important for preserving information across multiple messages or turns in a conversation.
+/// </summary>
+public class Step02_ConversationState(ITestOutputHelper output) : BaseResponsesAgentTest(output)
+{
+ [Fact]
+ public async Task ManuallyConstructPastConversationAsync()
+ {
+ // Define the agent
+ OpenAIResponseAgent agent = new(this.Client)
+ {
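+ // Disable response storage so the full conversation history must be supplied with each request.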
+ StoreEnabled = false,
+ };
+
+ ICollection<ChatMessageContent> messages =
+ [
+ new ChatMessageContent(AuthorRole.User, "knock knock."),
+ new ChatMessageContent(AuthorRole.Assistant, "Who's there?"),
+ new ChatMessageContent(AuthorRole.User, "Orange.")
+ ];
+ foreach (ChatMessageContent message in messages)
+ {
+ WriteAgentChatMessage(message);
+ }
+
+ // Invoke the agent and output the response
+ var responseItems = agent.InvokeAsync(messages);
+ await foreach (ChatMessageContent responseItem in responseItems)
+ {
+ WriteAgentChatMessage(responseItem);
+ }
+ }
+
+ [Fact]
+ public async Task ManuallyManageConversationStateWithResponsesChatCompletionApiAsync()
+ {
+ // Define the agent
+ OpenAIResponseAgent agent = new(this.Client)
+ {
+ StoreEnabled = false,
+ };
+
+ string[] messages =
+ [
+ "Tell me a joke?",
+ "Tell me another?",
+ ];
+
+ // Invoke the agent and output the response
+ AgentThread? agentThread = null;
+ foreach (string message in messages)
+ {
+ var userMessage = new ChatMessageContent(AuthorRole.User, message);
+ WriteAgentChatMessage(userMessage);
+
+ var responseItems = agent.InvokeAsync(userMessage, agentThread);
+ await foreach (AgentResponseItem<ChatMessageContent> responseItem in responseItems)
+ {
+ agentThread = responseItem.Thread;
+ WriteAgentChatMessage(responseItem.Message);
+ }
+ }
+ }
+
+ [Fact]
+ public async Task ManageConversationStateWithResponseApiAsync()
+ {
+ // Define the agent
+ OpenAIResponseAgent agent = new(this.Client)
+ {
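+ // Enable response storage so the service maintains conversation state across requests.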
+ StoreEnabled = true,
+ };
+
+ string[] messages =
+ [
+ "Tell me a joke?",
+ "Explain why this is funny.",
+ ];
+
+ // Invoke the agent and output the response
+ AgentThread? agentThread = null;
+ foreach (string message in messages)
+ {
+ var userMessage = new ChatMessageContent(AuthorRole.User, message);
+ WriteAgentChatMessage(userMessage);
+
+ var responseItems = agent.InvokeAsync(userMessage, agentThread);
+ await foreach (AgentResponseItem<ChatMessageContent> responseItem in responseItems)
+ {
+ agentThread = responseItem.Thread;
+ WriteAgentChatMessage(responseItem.Message);
+ }
+ }
+
+ // Display the contents in the latest thread
+ if (agentThread is not null)
+ {
+ this.Output.WriteLine("\n\nResponse Thread Messages\n");
+ var responseAgentThread = agentThread as OpenAIResponseAgentThread;
+ var threadMessages = responseAgentThread?.GetMessagesAsync();
+ if (threadMessages is not null)
+ {
+ await foreach (var threadMessage in threadMessages)
+ {
+ WriteAgentChatMessage(threadMessage);
+ }
+ }
+
+ await agentThread.DeleteAsync();
+ }
+ }
+}
diff --git a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
index 4a29c6e5de28..d0587839eaea 100644
--- a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
+++ b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
@@ -35,6 +35,7 @@
+
diff --git a/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs
index 5cd0055d8456..3fbeb3d870aa 100644
--- a/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs
+++ b/dotnet/src/Agents/OpenAI/Extensions/ChatContentMessageExtensions.cs
@@ -1,8 +1,10 @@
// Copyright (c) Microsoft. All rights reserved.
+using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.SemanticKernel.Agents.OpenAI.Internal;
using OpenAI.Assistants;
+using OpenAI.Responses;
namespace Microsoft.SemanticKernel.Agents.OpenAI;
@@ -33,4 +35,21 @@ public static IEnumerable<ThreadInitializationMessage> ToThreadInitializationMes
{
return messages.Select(message => message.ToThreadInitializationMessage());
}
+
+ /// <summary>
+ /// Converts a <see cref="ChatMessageContent"/> instance to a <see cref="ResponseItem"/>.
+ /// </summary>
+ /// <param name="message">The chat message content to convert.</param>
+ /// <returns>A <see cref="ResponseItem"/> instance.</returns>
+ public static ResponseItem ToResponseItem(this ChatMessageContent message)
+ {
+ return message.Role.Label switch
+ {
+ "system" => ResponseItem.CreateSystemMessageItem(message.Content),
+ "user" => ResponseItem.CreateUserMessageItem(message.Content),
+ "developer" => ResponseItem.CreateDeveloperMessageItem(message.Content),
+ "assistant" => ResponseItem.CreateAssistantMessageItem(message.Content),
+ _ => throw new NotSupportedException($"Unsupported role {message.Role.Label}. Only system, user, developer or assistant roles are allowed."),
+ };
+ }
}
diff --git a/dotnet/src/Agents/OpenAI/Extensions/ResponseItemExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/ResponseItemExtensions.cs
new file mode 100644
index 000000000000..07290327c1c3
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/Extensions/ResponseItemExtensions.cs
@@ -0,0 +1,69 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Responses;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+[ExcludeFromCodeCoverage]
+internal static class ResponseItemExtensions
+{
+ /// <summary>
+ /// Converts a <see cref="ResponseItem"/> instance to a <see cref="ChatMessageContent"/>.
+ /// </summary>
+ /// <param name="item">The response item to convert.</param>
+ /// <returns>A <see cref="ChatMessageContent"/> instance.</returns>
+ public static ChatMessageContent ToChatMessageContent(this ResponseItem item)
+ {
+ if (item is MessageResponseItem messageResponseItem)
+ {
+ var role = messageResponseItem.Role.ToAuthorRole();
+ var collection = messageResponseItem.Content.ToChatMessageContentItemCollection();
+
+ return new ChatMessageContent(role, collection, innerContent: messageResponseItem);
+ }
+ throw new InvalidOperationException($"Unsupported response item type '{item.GetType().Name}'.");
+ }
+
+ #region private
+ private static ChatMessageContentItemCollection ToChatMessageContentItemCollection(this IList<ResponseContentPart> content)
+ {
+ var collection = new ChatMessageContentItemCollection();
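+ // Map each response content part to the closest Semantic Kernel content type.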
+ foreach (var part in content)
+ {
+ if (part.Kind == ResponseContentPartKind.OutputText || part.Kind == ResponseContentPartKind.InputText)
+ {
+ collection.Add(new TextContent(part.Text, innerContent: part));
+ }
+ else if (part.Kind == ResponseContentPartKind.InputImage)
+ {
+ collection.Add(new FileReferenceContent(part.InputImageFileId, innerContent: part));
+ }
+ else if (part.Kind == ResponseContentPartKind.InputFile)
+ {
+ collection.Add(new FileReferenceContent(part.InputFileId, innerContent: part));
+ }
+ else if (part.Kind == ResponseContentPartKind.Refusal)
+ {
+ collection.Add(new TextContent(part.Refusal, innerContent: part));
+ }
+ }
+ return collection;
+ }
+
+ private static AuthorRole ToAuthorRole(this MessageRole messageRole)
+ {
+ return messageRole switch
+ {
+ MessageRole.Assistant => AuthorRole.Assistant,
+ MessageRole.Developer => AuthorRole.Developer,
+ MessageRole.System => AuthorRole.System,
+ MessageRole.User => AuthorRole.User,
+ _ => new AuthorRole("unknown"),
+ };
+ }
+ #endregion
+}
diff --git a/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs b/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs
new file mode 100644
index 000000000000..5c977ff8ccc9
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/OpenAIResponseAgent.cs
@@ -0,0 +1,173 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel.Agents.Extensions;
+using Microsoft.SemanticKernel.ChatCompletion;
+using OpenAI.Responses;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+/// <summary>
+/// Represents a <see cref="Agent"/> specialization based on the OpenAI Response API.
+/// </summary>
+[ExcludeFromCodeCoverage]
+public sealed class OpenAIResponseAgent : Agent
+{
+ /// <summary>
+ /// Initializes a new instance of the <see cref="OpenAIResponseAgent"/> class.
+ /// </summary>
+ /// <param name="client">The OpenAI provider for accessing the Responses API service.</param>
+ public OpenAIResponseAgent(OpenAIResponseClient client)
+ {
+ Verify.NotNull(client);
+
+ this.Client = client;
+ }
+
+ /// <summary>
+ /// Exposes the underlying client for additional use.
+ /// </summary>
+ public OpenAIResponseClient Client { get; }
+
+ /// <summary>
+ /// Gets a value indicating whether storing of messages is enabled.
+ /// </summary>
+ public bool StoreEnabled { get; init; } = true;
+
+ /// <inheritdoc/>
+ public override async IAsyncEnumerable<AgentResponseItem<ChatMessageContent>> InvokeAsync(ICollection<ChatMessageContent> messages, AgentThread? thread = null, AgentInvokeOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ Verify.NotNull(messages);
+
+ var agentThread = await this.EnsureThreadExistsWithMessagesAsync(messages, thread, cancellationToken).ConfigureAwait(false);
+
+ // Invoke responses with the updated chat history.
+ var chatHistory = new ChatHistory();
+ chatHistory.AddRange(messages);
+ var invokeResults = this.InternalInvokeAsync(
+ this.Name,
+ chatHistory,
+ agentThread,
+ options,
+ cancellationToken);
+
+ // Notify the thread of new messages and return them to the caller.
+ await foreach (var result in invokeResults.ConfigureAwait(false))
+ {
+ await this.NotifyThreadOfNewMessage(agentThread, result, cancellationToken).ConfigureAwait(false);
+ yield return new(result, agentThread);
+ }
+ }
+
+ /// <inheritdoc/>
+ public override IAsyncEnumerable<AgentResponseItem<StreamingChatMessageContent>> InvokeStreamingAsync(ICollection<ChatMessageContent> messages, AgentThread? thread = null, AgentInvokeOptions? options = null, CancellationToken cancellationToken = default)
+ {
+ throw new NotImplementedException();
+ }
+
+ /// <inheritdoc/>
+ [Experimental("SKEXP0110")]
+ protected override Task<AgentChannel> CreateChannelAsync(CancellationToken cancellationToken)
+ {
+ throw new NotImplementedException();
+ }
+
+ /// <inheritdoc/>
+ [Experimental("SKEXP0110")]
+ protected override IEnumerable<string> GetChannelKeys()
+ {
+ throw new NotImplementedException();
+ }
+
+ /// <inheritdoc/>
+ [Experimental("SKEXP0110")]
+ protected override Task<AgentChannel> RestoreChannelAsync(string channelState, CancellationToken cancellationToken)
+ {
+ throw new NotImplementedException();
+ }
+
+ #region private
+ private async Task<AgentThread> EnsureThreadExistsWithMessagesAsync(ICollection<ChatMessageContent> messages, AgentThread? thread, CancellationToken cancellationToken)
+ {
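+ // When storage is enabled the service tracks state in a server-backed response thread; otherwise keep history locally.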
+ if (this.StoreEnabled)
+ {
+ return await this.EnsureThreadExistsWithMessagesAsync(messages, thread, () => new OpenAIResponseAgentThread(this.Client), cancellationToken).ConfigureAwait(false);
+ }
+
+ return await this.EnsureThreadExistsWithMessagesAsync(messages, thread, () => new ChatHistoryAgentThread(), cancellationToken).ConfigureAwait(false);
+ }
+
+ private async IAsyncEnumerable<ChatMessageContent> InternalInvokeAsync(
+ string? agentName,
+ ChatHistory history,
+ AgentThread agentThread,
+ AgentInvokeOptions? options,
+ [EnumeratorCancellation] CancellationToken cancellationToken)
+ {
+ var kernel = options?.Kernel ?? this.Kernel;
+
+ var overrideHistory = history;
+ if (!this.StoreEnabled)
+ {
+ // Use the thread chat history
+ overrideHistory = [.. this.GetChatHistory(agentThread), .. history];
+ }
+
+ var inputItems = overrideHistory.Select(c => c.ToResponseItem());
+ var creationOptions = new ResponseCreationOptions()
+ {
+ EndUserId = this.GetDisplayName(),
+ Instructions = $"{this.Instructions}\n{options?.AdditionalInstructions}",
+ StoredOutputEnabled = this.StoreEnabled,
+ };
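+ // When state is stored by the service, continue the conversation from the previous response.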
+ if (this.StoreEnabled && agentThread.Id != null)
+ {
+ creationOptions.PreviousResponseId = agentThread.Id;
+ }
+
+ var clientResult = await this.Client.CreateResponseAsync(inputItems, creationOptions, cancellationToken).ConfigureAwait(false);
+ var response = clientResult.Value;
+
+ if (this.StoreEnabled)
+ {
+ this.UpdateResponseId(agentThread, response.Id);
+ }
+
+ var messages = response.OutputItems.Select(o => o.ToChatMessageContent());
+
+ foreach (ChatMessageContent message in messages)
+ {
+ message.AuthorName = this.Name;
+
+ yield return message;
+ }
+ }
+
+ private ChatHistory GetChatHistory(AgentThread agentThread)
+ {
+ if (agentThread is ChatHistoryAgentThread chatHistoryAgentThread)
+ {
+ return chatHistoryAgentThread.ChatHistory;
+ }
+
+ throw new InvalidOperationException("The agent thread is not a ChatHistoryAgentThread.");
+ }
+
+ private void UpdateResponseId(AgentThread agentThread, string id)
+ {
+ if (agentThread is OpenAIResponseAgentThread openAIResponseAgentThread)
+ {
+ openAIResponseAgentThread.ResponseId = id;
+ return;
+ }
+
+ throw new InvalidOperationException("The agent thread is not an OpenAIResponseAgentThread.");
+ }
+ #endregion
+}
diff --git a/dotnet/src/Agents/OpenAI/OpenAIResponseAgentThread.cs b/dotnet/src/Agents/OpenAI/OpenAIResponseAgentThread.cs
new file mode 100644
index 000000000000..28445d8a069b
--- /dev/null
+++ b/dotnet/src/Agents/OpenAI/OpenAIResponseAgentThread.cs
@@ -0,0 +1,116 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using System.Runtime.CompilerServices;
+using System.Threading;
+using System.Threading.Tasks;
+using OpenAI.Responses;
+
+namespace Microsoft.SemanticKernel.Agents.OpenAI;
+
+/// <summary>
+/// Represents a conversation thread for an OpenAI Response API based agent when store is enabled.
+/// </summary>
+[ExcludeFromCodeCoverage]
+public sealed class OpenAIResponseAgentThread : AgentThread
+{
+ private readonly OpenAIResponseClient _client;
+ private bool _isDeleted = false;
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="OpenAIResponseAgentThread"/> class.
+ /// </summary>
+ /// <param name="client">The agents client to use for interacting with responses.</param>
+ public OpenAIResponseAgentThread(OpenAIResponseClient client)
+ {
+ Verify.NotNull(client);
+
+ this._client = client;
+ }
+
+ /// <summary>
+ /// Initializes a new instance of the <see cref="OpenAIResponseAgentThread"/> class that resumes an existing response.
+ /// </summary>
+ /// <param name="client">The agents client to use for interacting with responses.</param>
+ /// <param name="responseId">The ID of an existing response to resume.</param>
+ public OpenAIResponseAgentThread(OpenAIResponseClient client, string responseId)
+ {
+ Verify.NotNull(client);
+ Verify.NotNull(responseId);
+
+ this._client = client;
+ this.ResponseId = responseId;
+ }
+
+ /// <summary>
+ /// The current response id.
+ /// </summary>
+ internal string? ResponseId { get; set; }
+
+ /// <inheritdoc/>
+ public override string? Id => this.ResponseId;
+
+ /// <inheritdoc/>
+ protected override Task<string?> CreateInternalAsync(CancellationToken cancellationToken = default)
+ {
+ if (this._isDeleted)
+ {
+ throw new InvalidOperationException("This thread has been deleted and cannot be recreated.");
+ }
+
+ // Id will not be available until after a message is sent
+ return Task.FromResult<string?>(null);
+ }
+
+ /// <inheritdoc/>
+ protected override Task DeleteInternalAsync(CancellationToken cancellationToken = default)
+ {
+ if (this._isDeleted)
+ {
+ return Task.CompletedTask;
+ }
+
+ if (this.ResponseId is null)
+ {
+ throw new InvalidOperationException("This thread cannot be deleted, since it has not been created.");
+ }
+
+ this._isDeleted = true;
+
+ return Task.CompletedTask;
+ }
+
+ /// <inheritdoc/>
+ protected override Task OnNewMessageInternalAsync(ChatMessageContent newMessage, CancellationToken cancellationToken = default)
+ {
+ if (this._isDeleted)
+ {
+ throw new InvalidOperationException("This thread has been deleted and cannot be used anymore.");
+ }
+
+ return Task.CompletedTask;
+ }
+
+ /// <inheritdoc/>
+ public async IAsyncEnumerable<ChatMessageContent> GetMessagesAsync([EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ if (this._isDeleted)
+ {
+ throw new InvalidOperationException("This thread has been deleted and cannot be used anymore.");
+ }
+
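+ // Retrieve the input items that produced the stored response identified by ResponseId.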
+ if (!string.IsNullOrEmpty(this.ResponseId))
+ {
+ var options = new ResponseItemCollectionOptions();
+ var collectionResult = this._client.GetResponseInputItemsAsync(this.ResponseId, options, cancellationToken).ConfigureAwait(false);
+ await foreach (var responseItem in collectionResult)
+ {
+ yield return responseItem.ToChatMessageContent();
+ }
+ }
+
+ yield break;
+ }
+}
diff --git a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj
index a0222fac89cf..62d9e3c8e935 100644
--- a/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj
+++ b/dotnet/src/Agents/UnitTests/Agents.UnitTests.csproj
@@ -47,7 +47,8 @@
-
+
+
\ No newline at end of file
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/BaseOpenAIResponseClientTest.cs b/dotnet/src/Agents/UnitTests/OpenAI/BaseOpenAIResponseClientTest.cs
new file mode 100644
index 000000000000..c2462ea1128f
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/BaseOpenAIResponseClientTest.cs
@@ -0,0 +1,41 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.ClientModel;
+using System.ClientModel.Primitives;
+using System.Net.Http;
+using OpenAI;
+using OpenAI.Responses;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI;
+
+/// <summary>
+/// Base class for tests which use <see cref="OpenAIResponseClient"/>.
+/// </summary>
+public class BaseOpenAIResponseClientTest : IDisposable
+{
+ internal MultipleHttpMessageHandlerStub MessageHandlerStub { get; }
+ internal HttpClient HttpClient { get; }
+ internal OpenAIResponseClient Client { get; }
+
+ internal BaseOpenAIResponseClientTest()
+ {
+ this.MessageHandlerStub = new MultipleHttpMessageHandlerStub();
+ this.HttpClient = new HttpClient(this.MessageHandlerStub, disposeHandler: false);
+
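+ // Route the client through the stubbed handler so HTTP responses can be faked in tests.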
+ var clientOptions = new OpenAIClientOptions()
+ {
+ Transport = new HttpClientPipelineTransport(this.HttpClient)
+ };
+ this.Client = new OpenAIResponseClient("model", new ApiKeyCredential("apiKey"), clientOptions);
+ }
+
+ /// <inheritdoc/>
+ public void Dispose()
+ {
+ this.MessageHandlerStub.Dispose();
+ this.HttpClient.Dispose();
+
+ GC.SuppressFinalize(this);
+ }
+}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentTests.cs
new file mode 100644
index 000000000000..86f8ff4f486b
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentTests.cs
@@ -0,0 +1,132 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Linq;
+using System.Net.Http;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI;
+
+/// <summary>
+/// Tests for the <see cref="OpenAIResponseAgent"/> class.
+/// </summary>
+public sealed class OpenAIResponseAgentTests : BaseOpenAIResponseClientTest
+{
+ /// <summary>
+ /// Tests that the constructor verifies parameters and throws when necessary.
+ /// </summary>
+ [Fact]
+ public void ConstructorShouldVerifyParams()
+ {
+ // Arrange & Act & Assert
+ Assert.Throws<ArgumentNullException>(() => new OpenAIResponseAgent(null!));
+ }
+
+ /// <summary>
+ /// Tests that OpenAIResponseAgent.InvokeAsync verifies parameters and throws when necessary.
+ /// </summary>
+ [Fact]
+ public void InvokeShouldVerifyParams()
+ {
+ // Arrange
+ var agent = new OpenAIResponseAgent(this.Client);
+ string nullString = null!;
+ ChatMessageContent nullMessage = null!;
+
+ // Act & Assert
+ Assert.Throws<ArgumentNullException>(() => agent.InvokeAsync(nullString));
+ Assert.Throws<ArgumentNullException>(() => agent.InvokeAsync(nullMessage));
+ }
+
+ /// <summary>
+ /// Tests that OpenAIResponseAgent.InvokeAsync returns the expected response from the service.
+ /// </summary>
+ [Fact]
+ public async Task VerifyInvokeAsync()
+ {
+ // Arrange
+ this.MessageHandlerStub.ResponsesToReturn.Add(
+ new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StringContent(InvokeResponse) }
+ );
+ var agent = new OpenAIResponseAgent(this.Client)
+ {
+ Name = "ResponseAgent",
+ Instructions = "Answer all queries in English and French.",
+ };
+
+ // Act
+ var responseItems = agent.InvokeAsync("What is the capital of France?");
+
+ // Assert
+ Assert.NotNull(responseItems);
+ var items = await responseItems!.ToListAsync();
+ Assert.Single(items);
+ Assert.Equal("The capital of France is Paris.\n\nLa capitale de la France est Paris.", items[0].Message.Content);
+ }
+
+ #region private
+ private const string InvokeResponse =
+ """
+ {
+ "id": "resp_67e8f5cf761c8191aab763d1e901e3410bbdc4b8da506cd2",
+ "object": "response",
+ "created_at": 1743320527,
+ "status": "completed",
+ "error": null,
+ "incomplete_details": null,
+ "instructions": "Answer all queries in English and French.",
+ "max_output_tokens": null,
+ "model": "gpt-4o-2024-08-06",
+ "output": [
+ {
+ "type": "message",
+ "id": "msg_67e8f5cfbe688191a428ed9869c39fea0bbdc4b8da506cd2",
+ "status": "completed",
+ "role": "assistant",
+ "content": [
+ {
+ "type": "output_text",
+ "text": "The capital of France is Paris.\n\nLa capitale de la France est Paris.",
+ "annotations": []
+ }
+ ]
+ }
+ ],
+ "parallel_tool_calls": true,
+ "previous_response_id": null,
+ "reasoning": {
+ "effort": null,
+ "generate_summary": null
+ },
+ "store": true,
+ "temperature": 1.0,
+ "text": {
+ "format": {
+ "type": "text"
+ }
+ },
+ "tool_choice": "auto",
+ "tools": [],
+ "top_p": 1.0,
+ "truncation": "disabled",
+ "usage": {
+ "input_tokens": 26,
+ "input_tokens_details": {
+ "cached_tokens": 0
+ },
+ "output_tokens": 16,
+ "output_tokens_details": {
+ "reasoning_tokens": 0
+ },
+ "total_tokens": 42
+ },
+ "user": "ResponseAgent",
+ "metadata": {}
+ }
+ """;
+ #endregion
+}
diff --git a/dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentThreadTests.cs b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentThreadTests.cs
new file mode 100644
index 000000000000..3c0eb684bd51
--- /dev/null
+++ b/dotnet/src/Agents/UnitTests/OpenAI/OpenAIResponseAgentThreadTests.cs
@@ -0,0 +1,137 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System;
+using System.Linq;
+using System.Net.Http;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Xunit;
+
+namespace SemanticKernel.Agents.UnitTests.OpenAI;
+
+/// <summary>
+/// Tests for the <see cref="OpenAIResponseAgentThread"/> class.
+/// </summary>
+public sealed class OpenAIResponseAgentThreadTests : BaseOpenAIResponseClientTest
+{
+ /// <summary>
+ /// Tests that the constructor verifies parameters and throws when necessary.
+ /// </summary>
+ [Fact]
+ public void ConstructorShouldVerifyParams()
+ {
+ // Arrange & Act & Assert
+ Assert.Throws<ArgumentNullException>(() => new OpenAIResponseAgentThread(null!));
+ Assert.Throws<ArgumentNullException>(() => new OpenAIResponseAgentThread(null!, "threadId"));
+ Assert.Throws<ArgumentNullException>(() => new OpenAIResponseAgentThread(this.Client, responseId: null!));
+
+ var agentThread = new OpenAIResponseAgentThread(this.Client);
+ Assert.NotNull(agentThread);
+ }
+
+ /// <summary>
+ /// Tests that the constructor for resuming a thread uses the provided parameters.
+ /// </summary>
+ [Fact]
+ public void ConstructorForResumingThreadShouldUseParams()
+ {
+ // Arrange & Act
+ var agentThread = new OpenAIResponseAgentThread(this.Client, "threadId");
+
+ // Assert
+ Assert.NotNull(agentThread);
+ Assert.Equal("threadId", agentThread.Id);
+ }
+
+ /// <summary>
+ /// Verify no messages are returned when the thread has not been used.
+ /// </summary>
+ [Fact]
+ public async Task VerifyGetMessagesWhenThreadIsUnusedAsync()
+ {
+ // Arrange
+ var thread = new OpenAIResponseAgentThread(this.Client);
+
+ // Act
+ var messages = thread.GetMessagesAsync();
+
+ // Assert
+ Assert.NotNull(messages);
+ var messagesList = await messages!.ToListAsync();
+ Assert.Empty(messagesList);
+ }
+
+ /// <summary>
+ /// Verify messages are returned when store is enabled.
+ /// </summary>
+ [Fact]
+ public async Task VerifyGetMessagesWhenStoreEnabledAsync()
+ {
+ // Arrange
+ this.MessageHandlerStub.ResponsesToReturn.Add(
+ new HttpResponseMessage(System.Net.HttpStatusCode.OK) { Content = new StringContent(MessagesResponse) }
+ );
+ var responseId = "resp_67e8ff743ea08191b085bea42b4d83e809a3a922c4f4221b";
+ var thread = new OpenAIResponseAgentThread(this.Client, responseId: responseId);
+
+ // Act
+ var messages = thread.GetMessagesAsync();
+
+ // Assert
+ Assert.NotNull(messages);
+ var messagesList = await messages!.ToListAsync();
+ Assert.Equal(3, messagesList.Count);
+ }
+
+ #region private
+ private const string MessagesResponse =
+ """
+ {
+ "object": "list",
+ "data": [
+ {
+ "type": "message",
+ "id": "msg_67e8ff7445408191af5d6f4a87a9d3fe09a3a922c4f4221b",
+ "status": "completed",
+ "role": "user",
+ "content": [
+ {
+ "type": "input_text",
+ "text": "Explain why this is funny."
+ }
+ ]
+ },
+ {
+ "type": "message",
+ "id": "msg_67e8ff73be188191b871e41c2816355209a3a922c4f4221b",
+ "status": "completed",
+ "role": "assistant",
+ "content": [
+ {
+ "type": "output_text",
+ "text": "Why don't skeletons fight each other?\n\nThey don't have the guts!",
+ "annotations": []
+ }
+ ]
+ },
+ {
+ "type": "message",
+ "id": "msg_67e8ff7258a081919e7964ac7b344bc909a3a922c4f4221b",
+ "status": "completed",
+ "role": "user",
+ "content": [
+ {
+ "type": "input_text",
+ "text": "Tell me a joke?"
+ }
+ ]
+ }
+ ],
+ "first_id": "msg_67e8ff7445408191af5d6f4a87a9d3fe09a3a922c4f4221b",
+ "last_id": "msg_67e8ff7258a081919e7964ac7b344bc909a3a922c4f4221b",
+ "has_more": false
+ }
+
+ """;
+ #endregion
+}
diff --git a/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseResponsesAgentTest.cs b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseResponsesAgentTest.cs
new file mode 100644
index 000000000000..cb9c98d7c05e
--- /dev/null
+++ b/dotnet/src/InternalUtilities/samples/AgentUtilities/BaseResponsesAgentTest.cs
@@ -0,0 +1,35 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.ClientModel;
+using System.ClientModel.Primitives;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using OpenAI;
+using OpenAI.Responses;
+
+/// <summary>
+/// Base class for samples that demonstrate the usage of <see cref="OpenAIResponseAgent"/>.
+/// </summary>
+public abstract class BaseResponsesAgentTest : BaseAgentsTest
+{
+ protected BaseResponsesAgentTest(ITestOutputHelper output) : base(output)
+ {
+ var options = new OpenAIClientOptions();
+ if (this.EnableLogging)
+ {
+ options.MessageLoggingPolicy = new MessageLoggingPolicy(new()
+ {
+ EnableLogging = true,
+ EnableMessageLogging = true,
+ EnableMessageContentLogging = true,
+ LoggerFactory = this.LoggerFactory
+ });
+ }
+
+ this.Client = new(model: "gpt-4o", credential: new ApiKeyCredential(TestConfiguration.OpenAI.ApiKey), options: options);
+ }
+
+ protected bool EnableLogging { get; set; } = false;
+
+ /// <inheritdoc/>
+ protected override OpenAIResponseClient Client { get; }
+}