Skip to content

.NET Agent - Add AIContext to OpenAIResponseAgent #12456

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 17 commits into from
Jun 19, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
135 changes: 135 additions & 0 deletions dotnet/samples/Concepts/Agents/OpenAIResponseAgent_Whiteboard.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,135 @@
// Copyright (c) Microsoft. All rights reserved.

using Azure.AI.OpenAI;
using Azure.Identity;
using Microsoft.Extensions.AI;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Agents;
using Microsoft.SemanticKernel.Agents.OpenAI;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Memory;

namespace Agents;

#pragma warning disable SKEXP0130 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.

/// <summary>
/// Demonstrate creation of <see cref="OpenAIResponseAgent"/> and
/// adding whiteboarding capabilities, where the most relevant information from the conversation is captured on a whiteboard.
/// This is useful for long running conversations where the conversation history may need to be truncated
/// over time, but you do not want the agent to lose context.
/// </summary>
public class OpenAIResponseAgent_Whiteboard(ITestOutputHelper output) : BaseResponsesAgentTest(output)
{
    private const string AgentName = "FriendlyAssistant";
    private const string AgentInstructions = "You are a friendly assistant";

    /// <summary>
    /// Shows how to allow an agent to use a whiteboard for storing the most important information
    /// from a long running, truncated conversation.
    /// </summary>
    [Fact]
    private async Task UseWhiteboardForShortTermMemory()
    {
        // Chat client used by the whiteboard provider to maintain the whiteboard content.
        IChatClient chatClient = new AzureOpenAIClient(new Uri(TestConfiguration.AzureOpenAI.Endpoint), new AzureCliCredential())
            .GetChatClient(TestConfiguration.AzureOpenAI.ChatDeploymentName)
            .AsIChatClient();

        // Create the whiteboard.
        WhiteboardProvider whiteboardProvider = new(chatClient);

        OpenAIResponseAgent agent = new(this.Client)
        {
            Name = AgentName,
            Instructions = AgentInstructions,
            // Allow the model to invoke functions from registered plugins automatically.
            Arguments = new KernelArguments(new PromptExecutionSettings { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }),
            // Keep conversation state client side so the locally truncated history is what the service sees.
            StoreEnabled = false,
        };

        // Create the agent with our sample plugin.
        agent.Kernel.Plugins.AddFromType<VMPlugin>();

        // Create a chat history reducer that we can use to truncate the chat history
        // when it goes over 3 items.
        ChatHistoryTruncationReducer chatHistoryReducer = new(3, 3);

        // Create a thread for the agent and add the whiteboard to it.
        ChatHistoryAgentThread agentThread = new();
        agentThread.AIContextProviders.Add(whiteboardProvider);

        // Simulate a conversation with the agent.
        // We will also truncate the conversation once it goes over a few items.
        await InvokeWithConsoleWriteLine("Hello");
        await InvokeWithConsoleWriteLine("I'd like to create a VM?");
        await InvokeWithConsoleWriteLine("I want it to have 3 cores.");
        await InvokeWithConsoleWriteLine("I want it to have 48GB of RAM.");
        await InvokeWithConsoleWriteLine("I want it to have a 500GB Harddrive.");
        await InvokeWithConsoleWriteLine("I want it in Europe.");
        await InvokeWithConsoleWriteLine("Can you make it Linux and call it 'ContosoVM'.");
        await InvokeWithConsoleWriteLine("OK, let's call it `ContosoFinanceVM_Europe` instead.");
        await InvokeWithConsoleWriteLine("Thanks, now I want to create another VM.");
        await InvokeWithConsoleWriteLine("Make all the options the same as the last one, except for the region, which should be North America, and the name, which should be 'ContosoFinanceVM_NorthAmerica'.");

        // Local helper: sends one user message, prints the agent response and the current
        // whiteboard contents, then truncates the chat history if it has grown too large.
        async Task InvokeWithConsoleWriteLine(string message)
        {
            // Print the user input.
            Console.WriteLine($"User: {message}");

            // Invoke the agent.
            ChatMessageContent response = await agent.InvokeAsync(message, agentThread).FirstAsync();

            // Print the response.
            this.WriteAgentChatMessage(response);

            // Make sure any async whiteboard processing is complete before we print out its contents.
            await whiteboardProvider.WhenProcessingCompleteAsync();

            // Print out the whiteboard contents.
            Console.WriteLine("Whiteboard contents:");
            foreach (var item in whiteboardProvider.CurrentWhiteboardContent)
            {
                Console.WriteLine($"- {item}");
            }
            Console.WriteLine();

            // Truncate the chat history if it gets too big.
            await agentThread.ChatHistory.ReduceInPlaceAsync(chatHistoryReducer, CancellationToken.None);
        }
    }

    /// <summary>
    /// Sample plugin exposing a VM creation function for the agent to call.
    /// </summary>
    private sealed class VMPlugin
    {
        /// <summary>
        /// Creates a virtual machine with the requested configuration.
        /// </summary>
        /// <returns>The result containing the identifier of the created VM.</returns>
        [KernelFunction]
        public Task<VMCreateResult> CreateVM(Region region, OperatingSystem os, string name, int numberOfCores, int memorySizeInGB, int hddSizeInGB)
        {
            if (name == "ContosoVM")
            {
                // Simulate a name collision so the sample can demonstrate the agent recovering
                // using context captured on the whiteboard. Use a specific exception type (CA2201).
                throw new InvalidOperationException("VM name already exists");
            }

            return Task.FromResult(new VMCreateResult { VMId = Guid.NewGuid().ToString() });
        }
    }

    /// <summary>
    /// Result returned by <see cref="VMPlugin.CreateVM"/>.
    /// </summary>
    public class VMCreateResult
    {
        // Identifier of the newly created VM.
        public string VMId { get; set; } = string.Empty;
    }

    // Regions the sample VM can be created in.
    private enum Region
    {
        NorthAmerica,
        SouthAmerica,
        Europe,
        Asia,
        Africa,
        Australia
    }

    // Operating systems the sample VM can run.
    private enum OperatingSystem
    {
        Windows,
        Linux,
        MacOS
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -43,12 +43,13 @@ public static IEnumerable<ThreadInitializationMessage> ToThreadInitializationMes
/// <returns>A <see cref="ResponseItem"/> instance.</returns>
public static ResponseItem ToResponseItem(this ChatMessageContent message)
{
string content = message.Content ?? string.Empty;
return message.Role.Label switch
{
"system" => ResponseItem.CreateSystemMessageItem(message.Content),
"user" => ResponseItem.CreateUserMessageItem(message.Content),
"developer" => ResponseItem.CreateDeveloperMessageItem(message.Content),
"assistant" => ResponseItem.CreateAssistantMessageItem(message.Content),
"system" => ResponseItem.CreateSystemMessageItem(content),
"user" => ResponseItem.CreateUserMessageItem(content),
"developer" => ResponseItem.CreateDeveloperMessageItem(content),
"assistant" => ResponseItem.CreateAssistantMessageItem(content),
_ => throw new NotSupportedException($"Unsupported role {message.Role.Label}. Only system, user, developer or assistant roles are allowed."),
};
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,10 @@ internal static ResponseCreationOptions CreateOptions(
return responseAgentInvokeOptions.ResponseCreationOptions;
}

var responseTools = agent.Kernel.Plugins
var responseTools = agent.GetKernel(invokeOptions).Plugins
.SelectMany(kp => kp.Select(kf => kf.ToResponseTool(kp.Name)));

var creationOptions = new ResponseCreationOptions()
var creationOptions = new ResponseCreationOptions
{
EndUserId = agent.GetDisplayName(),
Instructions = $"{agent.Instructions}\n{invokeOptions?.AdditionalInstructions}",
Expand Down
53 changes: 23 additions & 30 deletions dotnet/src/Agents/OpenAI/Internal/ResponseThreadActions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.SemanticKernel.Agents.Extensions;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.FunctionCalling;
using OpenAI.Responses;
Expand All @@ -27,14 +28,13 @@ internal static async IAsyncEnumerable<ChatMessageContent> InvokeAsync(
AgentInvokeOptions options,
[EnumeratorCancellation] CancellationToken cancellationToken)
{
var kernel = options?.Kernel ?? agent.Kernel;
var responseAgentThread = agentThread as OpenAIResponseAgentThread;

var overrideHistory = history;
if (!agent.StoreEnabled)
{
// Use the thread chat history
overrideHistory = [.. GetChatHistory(agentThread, history)];
overrideHistory = [.. GetChatHistory(agentThread)];
}

var creationOptions = ResponseCreationOptionsFactory.CreateOptions(agent, agentThread, options);
Expand Down Expand Up @@ -86,7 +86,7 @@ await functionProcessor.InvokeFunctionCallsAsync(
message,
(_) => true,
functionOptions,
kernel,
agent.GetKernel(options),
isStreaming: false,
cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false);
var functionOutputItems = functionResults.Select(fr => ResponseItem.CreateFunctionCallOutputItem(fr.CallId, fr.Result?.ToString() ?? string.Empty)).ToList();
Expand All @@ -102,8 +102,7 @@ await functionProcessor.InvokeFunctionCallsAsync(
}

// Return the function results as a message
ChatMessageContentItemCollection items = new();
items.AddRange(functionResults);
ChatMessageContentItemCollection items = [.. functionResults];
ChatMessageContent functionResultMessage = new()
{
Role = AuthorRole.Tool,
Expand All @@ -121,14 +120,13 @@ internal static async IAsyncEnumerable<StreamingChatMessageContent> InvokeStream
AgentInvokeOptions? options,
[EnumeratorCancellation] CancellationToken cancellationToken)
{
var kernel = options?.Kernel ?? agent.Kernel;
var responseAgentThread = agentThread as OpenAIResponseAgentThread;

var overrideHistory = history;
if (!agent.StoreEnabled)
{
// Use the thread chat history
overrideHistory = [.. GetChatHistory(agentThread, history)];
overrideHistory = [.. GetChatHistory(agentThread)];
}

var inputItems = overrideHistory.Select(m => m.ToResponseItem()).ToList();
Expand Down Expand Up @@ -161,6 +159,7 @@ internal static async IAsyncEnumerable<StreamingChatMessageContent> InvokeStream
case StreamingResponseCompletedUpdate completedUpdate:
response = completedUpdate.Response;
message = completedUpdate.Response.ToChatMessageContent();
overrideHistory.Add(message);
break;

case StreamingResponseOutputItemAddedUpdate outputItemAddedUpdate:
Expand Down Expand Up @@ -258,8 +257,8 @@ await functionProcessor.InvokeFunctionCallsAsync(
message!,
(_) => true,
functionOptions,
kernel,
isStreaming: false,
agent.GetKernel(options),
isStreaming: true,
cancellationToken).ToArrayAsync(cancellationToken).ConfigureAwait(false);
var functionOutputItems = functionResults.Select(fr => ResponseItem.CreateFunctionCallOutputItem(fr.CallId, fr.Result?.ToString() ?? string.Empty)).ToList();

Expand All @@ -274,21 +273,26 @@ await functionProcessor.InvokeFunctionCallsAsync(
}

// Return the function results as a message
ChatMessageContentItemCollection items = new();
items.AddRange(functionResults);
StreamingChatMessageContent functionResultMessage = new(
AuthorRole.Tool,
content: null)
ChatMessageContentItemCollection items = [.. functionResults];
ChatMessageContent functionResultMessage = new()
{
ModelId = modelId,
InnerContent = functionCallUpdateContent,
Items = [functionCallUpdateContent],
Role = AuthorRole.Tool,
Items = items,
};
yield return functionResultMessage;
StreamingChatMessageContent streamingFunctionResultMessage =
new(AuthorRole.Tool,
content: null)
{
ModelId = modelId,
InnerContent = functionCallUpdateContent,
Items = [functionCallUpdateContent],
};
overrideHistory.Add(functionResultMessage);
yield return streamingFunctionResultMessage;
}
}

private static ChatHistory GetChatHistory(AgentThread agentThread, ChatHistory history)
private static ChatHistory GetChatHistory(AgentThread agentThread)
{
if (agentThread is ChatHistoryAgentThread chatHistoryAgentThread)
{
Expand All @@ -298,17 +302,6 @@ private static ChatHistory GetChatHistory(AgentThread agentThread, ChatHistory h
throw new InvalidOperationException("The agent thread is not a ChatHistoryAgentThread.");
}

private static void UpdateResponseId(AgentThread agentThread, string id)
{
if (agentThread is OpenAIResponseAgentThread openAIResponseAgentThread)
{
openAIResponseAgentThread.ResponseId = id;
return;
}

throw new InvalidOperationException("The agent thread is not an OpenAIResponseAgentThread.");
}

private static void ThrowIfIncompleteOrFailed(OpenAIResponseAgent agent, OpenAIResponse response)
{
if (response.Status == ResponseStatus.Incomplete || response.Status == ResponseStatus.Failed)
Expand Down
Loading
Loading