diff --git a/.github/upgrades/prompts/SemanticKernelToAgentFramework.md b/.github/upgrades/prompts/SemanticKernelToAgentFramework.md
index a8c3dcb0a6..44985bba98 100644
--- a/.github/upgrades/prompts/SemanticKernelToAgentFramework.md
+++ b/.github/upgrades/prompts/SemanticKernelToAgentFramework.md
@@ -839,7 +839,7 @@ var agentOptions = new ChatClientAgentRunOptions(new ChatOptions
{
MaxOutputTokens = 8000,
// Breaking glass to access provider-specific options
- RawRepresentationFactory = (_) => new OpenAI.Responses.ResponseCreationOptions()
+ RawRepresentationFactory = (_) => new OpenAI.Responses.CreateResponseOptions()
{
ReasoningOptions = new()
{
diff --git a/dotnet/.editorconfig b/dotnet/.editorconfig
index c0d0d04fe9..fea0183976 100644
--- a/dotnet/.editorconfig
+++ b/dotnet/.editorconfig
@@ -209,6 +209,7 @@ dotnet_diagnostic.CA2000.severity = none # Call System.IDisposable.Dispose on ob
dotnet_diagnostic.CA2225.severity = none # Operator overloads have named alternates
dotnet_diagnostic.CA2227.severity = none # Change to be read-only by removing the property setter
dotnet_diagnostic.CA2249.severity = suggestion # Consider using 'Contains' method instead of 'IndexOf' method
+dotnet_diagnostic.CA2252.severity = none # This API requires opting into preview features
dotnet_diagnostic.CA2253.severity = none # Named placeholders in the logging message template should not be comprised of only numeric characters
dotnet_diagnostic.CA2253.severity = none # Named placeholders in the logging message template should not be comprised of only numeric characters
dotnet_diagnostic.CA2263.severity = suggestion # Use generic overload
diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props
index c7a051bf83..4825a42921 100644
--- a/dotnet/Directory.Packages.props
+++ b/dotnet/Directory.Packages.props
@@ -19,10 +19,10 @@
-
-
+
+
-
+
@@ -61,10 +61,9 @@
-
-
-
-
+
+
+
@@ -101,11 +100,10 @@
-
-
+
diff --git a/dotnet/samples/AgentWebChat/AgentWebChat.AgentHost/AgentWebChat.AgentHost.csproj b/dotnet/samples/AgentWebChat/AgentWebChat.AgentHost/AgentWebChat.AgentHost.csproj
index 3f2a832a69..f71becf5d3 100644
--- a/dotnet/samples/AgentWebChat/AgentWebChat.AgentHost/AgentWebChat.AgentHost.csproj
+++ b/dotnet/samples/AgentWebChat/AgentWebChat.AgentHost/AgentWebChat.AgentHost.csproj
@@ -25,7 +25,6 @@
-
diff --git a/dotnet/samples/AgentWebChat/AgentWebChat.AgentHost/Utilities/ChatClientExtensions.cs b/dotnet/samples/AgentWebChat/AgentWebChat.AgentHost/Utilities/ChatClientExtensions.cs
index 6cd3d888c8..7b1f2d86b4 100644
--- a/dotnet/samples/AgentWebChat/AgentWebChat.AgentHost/Utilities/ChatClientExtensions.cs
+++ b/dotnet/samples/AgentWebChat/AgentWebChat.AgentHost/Utilities/ChatClientExtensions.cs
@@ -1,8 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
using AgentWebChat.AgentHost.Utilities;
-using Azure;
-using Azure.AI.Inference;
using Microsoft.Extensions.AI;
using OllamaSharp;
@@ -24,7 +22,6 @@ public static ChatClientBuilder AddChatClient(this IHostApplicationBuilder build
ClientChatProvider.Ollama => builder.AddOllamaClient(connectionName, connectionInfo),
ClientChatProvider.OpenAI => builder.AddOpenAIClient(connectionName, connectionInfo),
ClientChatProvider.AzureOpenAI => builder.AddAzureOpenAIClient(connectionName).AddChatClient(connectionInfo.SelectedModel),
- ClientChatProvider.AzureAIInference => builder.AddAzureInferenceClient(connectionName, connectionInfo),
_ => throw new NotSupportedException($"Unsupported provider: {connectionInfo.Provider}")
};
@@ -44,16 +41,6 @@ private static ChatClientBuilder AddOpenAIClient(this IHostApplicationBuilder bu
})
.AddChatClient(connectionInfo.SelectedModel);
- private static ChatClientBuilder AddAzureInferenceClient(this IHostApplicationBuilder builder, string connectionName, ChatClientConnectionInfo connectionInfo) =>
- builder.Services.AddChatClient(sp =>
- {
- var credential = new AzureKeyCredential(connectionInfo.AccessKey!);
-
- var client = new ChatCompletionsClient(connectionInfo.Endpoint, credential, new AzureAIInferenceClientOptions());
-
- return client.AsIChatClient(connectionInfo.SelectedModel);
- });
-
private static ChatClientBuilder AddOllamaClient(this IHostApplicationBuilder builder, string connectionName, ChatClientConnectionInfo connectionInfo)
{
var httpKey = $"{connectionName}_http";
@@ -83,7 +70,6 @@ public static ChatClientBuilder AddKeyedChatClient(this IHostApplicationBuilder
ClientChatProvider.Ollama => builder.AddKeyedOllamaClient(connectionName, connectionInfo),
ClientChatProvider.OpenAI => builder.AddKeyedOpenAIClient(connectionName, connectionInfo),
ClientChatProvider.AzureOpenAI => builder.AddKeyedAzureOpenAIClient(connectionName).AddKeyedChatClient(connectionName, connectionInfo.SelectedModel),
- ClientChatProvider.AzureAIInference => builder.AddKeyedAzureInferenceClient(connectionName, connectionInfo),
_ => throw new NotSupportedException($"Unsupported provider: {connectionInfo.Provider}")
};
@@ -103,16 +89,6 @@ private static ChatClientBuilder AddKeyedOpenAIClient(this IHostApplicationBuild
})
.AddKeyedChatClient(connectionName, connectionInfo.SelectedModel);
- private static ChatClientBuilder AddKeyedAzureInferenceClient(this IHostApplicationBuilder builder, string connectionName, ChatClientConnectionInfo connectionInfo) =>
- builder.Services.AddKeyedChatClient(connectionName, sp =>
- {
- var credential = new AzureKeyCredential(connectionInfo.AccessKey!);
-
- var client = new ChatCompletionsClient(connectionInfo.Endpoint, credential, new AzureAIInferenceClientOptions());
-
- return client.AsIChatClient(connectionInfo.SelectedModel);
- });
-
private static ChatClientBuilder AddKeyedOllamaClient(this IHostApplicationBuilder builder, string connectionName, ChatClientConnectionInfo connectionInfo)
{
var httpKey = $"{connectionName}_http";
diff --git a/dotnet/samples/AgentWebChat/AgentWebChat.Web/OpenAIResponsesAgentClient.cs b/dotnet/samples/AgentWebChat/AgentWebChat.Web/OpenAIResponsesAgentClient.cs
index 7cc85b97c3..d0121a6165 100644
--- a/dotnet/samples/AgentWebChat/AgentWebChat.Web/OpenAIResponsesAgentClient.cs
+++ b/dotnet/samples/AgentWebChat/AgentWebChat.Web/OpenAIResponsesAgentClient.cs
@@ -27,7 +27,7 @@ public override async IAsyncEnumerable RunStreamingAsync
Transport = new HttpClientPipelineTransport(httpClient)
};
- var openAiClient = new OpenAIResponseClient(model: agentName, credential: new ApiKeyCredential("dummy-key"), options: options).AsIChatClient();
+ var openAiClient = new ResponsesClient(model: agentName, credential: new ApiKeyCredential("dummy-key"), options: options).AsIChatClient();
var chatOptions = new ChatOptions()
{
ConversationId = threadId
diff --git a/dotnet/samples/GettingStarted/AgentProviders/Agent_With_AzureOpenAIResponses/Program.cs b/dotnet/samples/GettingStarted/AgentProviders/Agent_With_AzureOpenAIResponses/Program.cs
index 83d5619382..5ce85b2b91 100644
--- a/dotnet/samples/GettingStarted/AgentProviders/Agent_With_AzureOpenAIResponses/Program.cs
+++ b/dotnet/samples/GettingStarted/AgentProviders/Agent_With_AzureOpenAIResponses/Program.cs
@@ -13,7 +13,7 @@
AIAgent agent = new AzureOpenAIClient(
new Uri(endpoint),
new AzureCliCredential())
- .GetOpenAIResponseClient(deploymentName)
+ .GetResponsesClient(deploymentName)
.CreateAIAgent(instructions: "You are good at telling jokes.", name: "Joker");
// Invoke the agent and output the text result.
diff --git a/dotnet/samples/GettingStarted/AgentProviders/Agent_With_OpenAIResponses/Program.cs b/dotnet/samples/GettingStarted/AgentProviders/Agent_With_OpenAIResponses/Program.cs
index df53ba8869..b0d0285928 100644
--- a/dotnet/samples/GettingStarted/AgentProviders/Agent_With_OpenAIResponses/Program.cs
+++ b/dotnet/samples/GettingStarted/AgentProviders/Agent_With_OpenAIResponses/Program.cs
@@ -11,7 +11,7 @@
AIAgent agent = new OpenAIClient(
apiKey)
- .GetOpenAIResponseClient(model)
+ .GetResponsesClient(model)
.CreateAIAgent(instructions: "You are good at telling jokes.", name: "Joker");
// Invoke the agent and output the text result.
diff --git a/dotnet/samples/GettingStarted/AgentWithOpenAI/Agent_OpenAI_Step02_Reasoning/Program.cs b/dotnet/samples/GettingStarted/AgentWithOpenAI/Agent_OpenAI_Step02_Reasoning/Program.cs
index e06a8cc76f..aa18fdd286 100644
--- a/dotnet/samples/GettingStarted/AgentWithOpenAI/Agent_OpenAI_Step02_Reasoning/Program.cs
+++ b/dotnet/samples/GettingStarted/AgentWithOpenAI/Agent_OpenAI_Step02_Reasoning/Program.cs
@@ -11,11 +11,11 @@
var model = Environment.GetEnvironmentVariable("OPENAI_MODEL") ?? "gpt-5";
var client = new OpenAIClient(apiKey)
- .GetOpenAIResponseClient(model)
+ .GetResponsesClient(model)
.AsIChatClient().AsBuilder()
.ConfigureOptions(o =>
{
- o.RawRepresentationFactory = _ => new ResponseCreationOptions()
+ o.RawRepresentationFactory = _ => new CreateResponseOptions()
{
ReasoningOptions = new()
{
diff --git a/dotnet/samples/GettingStarted/AgentWithOpenAI/Agent_OpenAI_Step04_CreateFromOpenAIResponseClient/OpenAIResponseClientAgent.cs b/dotnet/samples/GettingStarted/AgentWithOpenAI/Agent_OpenAI_Step04_CreateFromOpenAIResponseClient/OpenAIResponseClientAgent.cs
index 456de02836..622223307c 100644
--- a/dotnet/samples/GettingStarted/AgentWithOpenAI/Agent_OpenAI_Step04_CreateFromOpenAIResponseClient/OpenAIResponseClientAgent.cs
+++ b/dotnet/samples/GettingStarted/AgentWithOpenAI/Agent_OpenAI_Step04_CreateFromOpenAIResponseClient/OpenAIResponseClientAgent.cs
@@ -16,13 +16,13 @@ public class OpenAIResponseClientAgent : DelegatingAIAgent
///
/// Initialize an instance of .
///
- /// Instance of
+ /// Instance of
/// Optional instructions for the agent.
/// Optional name for the agent.
/// Optional description for the agent.
/// Optional instance of
public OpenAIResponseClientAgent(
- OpenAIResponseClient client,
+ ResponsesClient client,
string? instructions = null,
string? name = null,
string? description = null,
@@ -39,11 +39,11 @@ public OpenAIResponseClientAgent(
///
/// Initialize an instance of .
///
- /// Instance of
+ /// Instance of
/// Options to create the agent.
/// Optional instance of
public OpenAIResponseClientAgent(
- OpenAIResponseClient client, ChatClientAgentOptions options, ILoggerFactory? loggerFactory = null) :
+ ResponsesClient client, ChatClientAgentOptions options, ILoggerFactory? loggerFactory = null) :
base(new ChatClientAgent((client ?? throw new ArgumentNullException(nameof(client))).AsIChatClient(), options, loggerFactory))
{
}
@@ -55,8 +55,8 @@ public OpenAIResponseClientAgent(
/// The conversation thread to continue with this invocation. If not provided, creates a new thread. The thread will be mutated with the provided messages and agent response.
/// Optional parameters for agent invocation.
/// The to monitor for cancellation requests. The default is .
- /// A containing the list of items.
- public virtual async Task RunAsync(
+ /// A containing the list of items.
+ public virtual async Task RunAsync(
IEnumerable messages,
AgentThread? thread = null,
AgentRunOptions? options = null,
@@ -74,7 +74,7 @@ public virtual async Task RunAsync(
/// The conversation thread to continue with this invocation. If not provided, creates a new thread. The thread will be mutated with the provided messages and agent response.
/// Optional parameters for agent invocation.
/// The to monitor for cancellation requests. The default is .
- /// A containing the list of items.
+ /// A containing the list of items.
public virtual async IAsyncEnumerable RunStreamingAsync(
IEnumerable messages,
AgentThread? thread = null,
diff --git a/dotnet/samples/GettingStarted/AgentWithOpenAI/Agent_OpenAI_Step04_CreateFromOpenAIResponseClient/Program.cs b/dotnet/samples/GettingStarted/AgentWithOpenAI/Agent_OpenAI_Step04_CreateFromOpenAIResponseClient/Program.cs
index 89a96bc0fb..5c229cc57d 100644
--- a/dotnet/samples/GettingStarted/AgentWithOpenAI/Agent_OpenAI_Step04_CreateFromOpenAIResponseClient/Program.cs
+++ b/dotnet/samples/GettingStarted/AgentWithOpenAI/Agent_OpenAI_Step04_CreateFromOpenAIResponseClient/Program.cs
@@ -1,6 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
-// This sample demonstrates how to create OpenAIResponseClientAgent directly from an OpenAIResponseClient instance.
+// This sample demonstrates how to create OpenAIResponseClientAgent directly from a ResponsesClient instance.
using OpenAI;
using OpenAI.Responses;
@@ -9,16 +9,16 @@
var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new InvalidOperationException("OPENAI_API_KEY is not set.");
var model = Environment.GetEnvironmentVariable("OPENAI_MODEL") ?? "gpt-4o-mini";
-// Create an OpenAIResponseClient directly from OpenAIClient
-OpenAIResponseClient responseClient = new OpenAIClient(apiKey).GetOpenAIResponseClient(model);
+// Create a ResponsesClient directly from OpenAIClient
+ResponsesClient responseClient = new OpenAIClient(apiKey).GetResponsesClient(model);
-// Create an agent directly from the OpenAIResponseClient using OpenAIResponseClientAgent
+// Create an agent directly from the ResponsesClient using OpenAIResponseClientAgent
OpenAIResponseClientAgent agent = new(responseClient, instructions: "You are good at telling jokes.", name: "Joker");
ResponseItem userMessage = ResponseItem.CreateUserMessageItem("Tell me a joke about a pirate.");
// Invoke the agent and output the text result.
-OpenAIResponse response = await agent.RunAsync([userMessage]);
+ResponseResult response = await agent.RunAsync([userMessage]);
Console.WriteLine(response.GetOutputText());
// Invoke the agent with streaming support.
diff --git a/dotnet/samples/GettingStarted/AgentWithOpenAI/Agent_OpenAI_Step05_Conversation/Program.cs b/dotnet/samples/GettingStarted/AgentWithOpenAI/Agent_OpenAI_Step05_Conversation/Program.cs
index 9f81a27dda..8aebebdfa0 100644
--- a/dotnet/samples/GettingStarted/AgentWithOpenAI/Agent_OpenAI_Step05_Conversation/Program.cs
+++ b/dotnet/samples/GettingStarted/AgentWithOpenAI/Agent_OpenAI_Step05_Conversation/Program.cs
@@ -21,8 +21,8 @@
OpenAIClient openAIClient = new(apiKey);
ConversationClient conversationClient = openAIClient.GetConversationClient();
-// Create an agent directly from the OpenAIResponseClient using OpenAIResponseClientAgent
-ChatClientAgent agent = new(openAIClient.GetOpenAIResponseClient(model).AsIChatClient(), instructions: "You are a helpful assistant.", name: "ConversationAgent");
+// Create an agent directly from the ResponsesClient using ChatClientAgent
+ChatClientAgent agent = new(openAIClient.GetResponsesClient(model).AsIChatClient(), instructions: "You are a helpful assistant.", name: "ConversationAgent");
ClientResult createConversationResult = await conversationClient.CreateConversationAsync(BinaryContent.Create(BinaryData.FromString("{}")));
diff --git a/dotnet/samples/GettingStarted/Agents/Agent_Step13_BackgroundResponsesWithToolsAndPersistence/Program.cs b/dotnet/samples/GettingStarted/Agents/Agent_Step13_BackgroundResponsesWithToolsAndPersistence/Program.cs
index 41493f6d79..29dc347b4a 100644
--- a/dotnet/samples/GettingStarted/Agents/Agent_Step13_BackgroundResponsesWithToolsAndPersistence/Program.cs
+++ b/dotnet/samples/GettingStarted/Agents/Agent_Step13_BackgroundResponsesWithToolsAndPersistence/Program.cs
@@ -22,7 +22,7 @@
AIAgent agent = new AzureOpenAIClient(
new Uri(endpoint),
new AzureCliCredential())
- .GetOpenAIResponseClient(deploymentName)
+ .GetResponsesClient(deploymentName)
.CreateAIAgent(
name: "SpaceNovelWriter",
instructions: "You are a space novel writer. Always research relevant facts and generate character profiles for the main characters before writing novels." +
diff --git a/dotnet/samples/GettingStarted/Agents/Agent_Step17_BackgroundResponses/Program.cs b/dotnet/samples/GettingStarted/Agents/Agent_Step17_BackgroundResponses/Program.cs
index 510a5dfbd0..3e172a95b5 100644
--- a/dotnet/samples/GettingStarted/Agents/Agent_Step17_BackgroundResponses/Program.cs
+++ b/dotnet/samples/GettingStarted/Agents/Agent_Step17_BackgroundResponses/Program.cs
@@ -13,7 +13,7 @@
AIAgent agent = new AzureOpenAIClient(
new Uri(endpoint),
new AzureCliCredential())
- .GetOpenAIResponseClient(deploymentName)
+ .GetResponsesClient(deploymentName)
.CreateAIAgent();
// Enable background responses (only supported by OpenAI Responses at this time).
diff --git a/dotnet/samples/GettingStarted/FoundryAgents/FoundryAgents_Step15_ComputerUse/Program.cs b/dotnet/samples/GettingStarted/FoundryAgents/FoundryAgents_Step15_ComputerUse/Program.cs
index 05fb39bbf4..ff4f57924a 100644
--- a/dotnet/samples/GettingStarted/FoundryAgents/FoundryAgents_Step15_ComputerUse/Program.cs
+++ b/dotnet/samples/GettingStarted/FoundryAgents/FoundryAgents_Step15_ComputerUse/Program.cs
@@ -73,7 +73,7 @@ private static async Task InvokeComputerUseAgentAsync(AIAgent agent)
Dictionary screenshots = ComputerUseUtil.LoadScreenshotAssets();
ChatOptions chatOptions = new();
- ResponseCreationOptions responseCreationOptions = new()
+ CreateResponseOptions responseCreationOptions = new()
{
TruncationMode = ResponseTruncationMode.Auto
};
diff --git a/dotnet/samples/GettingStarted/ModelContextProtocol/ResponseAgent_Hosted_MCP/Program.cs b/dotnet/samples/GettingStarted/ModelContextProtocol/ResponseAgent_Hosted_MCP/Program.cs
index ba4249c765..13ee28d6a1 100644
--- a/dotnet/samples/GettingStarted/ModelContextProtocol/ResponseAgent_Hosted_MCP/Program.cs
+++ b/dotnet/samples/GettingStarted/ModelContextProtocol/ResponseAgent_Hosted_MCP/Program.cs
@@ -30,7 +30,7 @@
AIAgent agent = new AzureOpenAIClient(
new Uri(endpoint),
new AzureCliCredential())
- .GetOpenAIResponseClient(deploymentName)
+ .GetResponsesClient(deploymentName)
.CreateAIAgent(
instructions: "You answer questions by searching the Microsoft Learn content only.",
name: "MicrosoftLearnAgent",
@@ -57,7 +57,7 @@
AIAgent agentWithRequiredApproval = new AzureOpenAIClient(
new Uri(endpoint),
new AzureCliCredential())
- .GetOpenAIResponseClient(deploymentName)
+ .GetResponsesClient(deploymentName)
.CreateAIAgent(
instructions: "You answer questions by searching the Microsoft Learn content only.",
name: "MicrosoftLearnAgentWithApproval",
diff --git a/dotnet/samples/Purview/AgentWithPurview/Program.cs b/dotnet/samples/Purview/AgentWithPurview/Program.cs
index 842917b427..a4b27c47cd 100644
--- a/dotnet/samples/Purview/AgentWithPurview/Program.cs
+++ b/dotnet/samples/Purview/AgentWithPurview/Program.cs
@@ -27,7 +27,7 @@
using IChatClient client = new AzureOpenAIClient(
new Uri(endpoint),
new AzureCliCredential())
- .GetOpenAIResponseClient(deploymentName)
+ .GetResponsesClient(deploymentName)
.AsIChatClient()
.AsBuilder()
.WithPurview(browserCredential, new PurviewSettings("Agent Framework Test App"))
diff --git a/dotnet/src/Microsoft.Agents.AI.AzureAI/AzureAIProjectChatClient.cs b/dotnet/src/Microsoft.Agents.AI.AzureAI/AzureAIProjectChatClient.cs
index 8acafc8fc3..f31c570508 100644
--- a/dotnet/src/Microsoft.Agents.AI.AzureAI/AzureAIProjectChatClient.cs
+++ b/dotnet/src/Microsoft.Agents.AI.AzureAI/AzureAIProjectChatClient.cs
@@ -23,11 +23,6 @@ internal sealed class AzureAIProjectChatClient : DelegatingChatClient
private readonly AgentRecord? _agentRecord;
private readonly ChatOptions? _chatOptions;
private readonly AgentReference _agentReference;
- ///
- /// The usage of a no-op model is a necessary change to avoid OpenAIClients to throw exceptions when
- /// used with Azure AI Agents as the model used is now defined at the agent creation time.
- ///
- private const string NoOpModel = "no-op";
///
/// Initializes a new instance of the class.
@@ -42,7 +37,7 @@ internal sealed class AzureAIProjectChatClient : DelegatingChatClient
internal AzureAIProjectChatClient(AIProjectClient aiProjectClient, AgentReference agentReference, string? defaultModelId, ChatOptions? chatOptions)
: base(Throw.IfNull(aiProjectClient)
.GetProjectOpenAIClient()
- .GetOpenAIResponseClient(defaultModelId ?? NoOpModel)
+ .GetProjectResponsesClientForAgent(agentReference)
.AsIChatClient())
{
this._agentClient = aiProjectClient;
@@ -132,13 +127,15 @@ private ChatOptions GetAgentEnabledChatOptions(ChatOptions? options)
agentEnabledChatOptions.RawRepresentationFactory = (client) =>
{
- if (originalFactory?.Invoke(this) is not ResponseCreationOptions responseCreationOptions)
+ if (originalFactory?.Invoke(this) is not CreateResponseOptions responseCreationOptions)
{
- responseCreationOptions = new ResponseCreationOptions();
+ responseCreationOptions = new CreateResponseOptions();
}
- ResponseCreationOptionsExtensions.set_Agent(responseCreationOptions, this._agentReference);
- ResponseCreationOptionsExtensions.set_Model(responseCreationOptions, null);
+ responseCreationOptions.Agent = this._agentReference;
+#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
+ responseCreationOptions.Patch.Remove("$.model"u8);
+#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
return responseCreationOptions;
};
diff --git a/dotnet/src/Microsoft.Agents.AI.AzureAI/AzureAIProjectChatClientExtensions.cs b/dotnet/src/Microsoft.Agents.AI.AzureAI/AzureAIProjectChatClientExtensions.cs
index dfbdad8e98..7319bb13eb 100644
--- a/dotnet/src/Microsoft.Agents.AI.AzureAI/AzureAIProjectChatClientExtensions.cs
+++ b/dotnet/src/Microsoft.Agents.AI.AzureAI/AzureAIProjectChatClientExtensions.cs
@@ -400,7 +400,7 @@ public static ChatClientAgent CreateAIAgent(
};
// Attempt to capture breaking glass options from the raw representation factory that match the agent definition.
- if (options.ChatOptions?.RawRepresentationFactory?.Invoke(new NoOpChatClient()) is ResponseCreationOptions respCreationOptions)
+ if (options.ChatOptions?.RawRepresentationFactory?.Invoke(new NoOpChatClient()) is CreateResponseOptions respCreationOptions)
{
agentDefinition.ReasoningOptions = respCreationOptions.ReasoningOptions;
}
@@ -466,7 +466,7 @@ public static async Task CreateAIAgentAsync(
};
// Attempt to capture breaking glass options from the raw representation factory that match the agent definition.
- if (options.ChatOptions?.RawRepresentationFactory?.Invoke(new NoOpChatClient()) is ResponseCreationOptions respCreationOptions)
+ if (options.ChatOptions?.RawRepresentationFactory?.Invoke(new NoOpChatClient()) is CreateResponseOptions respCreationOptions)
{
agentDefinition.ReasoningOptions = respCreationOptions.ReasoningOptions;
}
diff --git a/dotnet/src/Microsoft.Agents.AI.Hosting.OpenAI/Responses/Models/ConversationReference.cs b/dotnet/src/Microsoft.Agents.AI.Hosting.OpenAI/Responses/Models/ConversationReference.cs
index dc38375331..d5a1d96240 100644
--- a/dotnet/src/Microsoft.Agents.AI.Hosting.OpenAI/Responses/Models/ConversationReference.cs
+++ b/dotnet/src/Microsoft.Agents.AI.Hosting.OpenAI/Responses/Models/ConversationReference.cs
@@ -84,22 +84,18 @@ public override void Write(Utf8JsonWriter writer, ConversationReference value, J
return;
}
- // If only ID is present and no metadata, serialize as a simple string
- if (value.Metadata is null || value.Metadata.Count == 0)
+ // Ideally, if only the ID is present and there is no metadata, we would serialize it as a simple string.
+ // However, while a request's "conversation" property can be either a string or an object
+ // containing a string, a response's "conversation" property is always an object. Since
+ // we don't know here which scenario we're in, we always serialize as an object, which works
+ // in either scenario.
+ writer.WriteStartObject();
+ writer.WriteString("id", value.Id);
+ if (value.Metadata is not null)
{
- writer.WriteStringValue(value.Id);
- }
- else
- {
- // Otherwise, serialize as an object
- writer.WriteStartObject();
- writer.WriteString("id", value.Id);
- if (value.Metadata is not null)
- {
- writer.WritePropertyName("metadata");
- JsonSerializer.Serialize(writer, value.Metadata, OpenAIHostingJsonContext.Default.DictionaryStringString);
- }
- writer.WriteEndObject();
+ writer.WritePropertyName("metadata");
+ JsonSerializer.Serialize(writer, value.Metadata, OpenAIHostingJsonContext.Default.DictionaryStringString);
}
+ writer.WriteEndObject();
}
}
diff --git a/dotnet/src/Microsoft.Agents.AI.OpenAI/Extensions/AIAgentWithOpenAIExtensions.cs b/dotnet/src/Microsoft.Agents.AI.OpenAI/Extensions/AIAgentWithOpenAIExtensions.cs
index 4abc6915a6..d487ba00e1 100644
--- a/dotnet/src/Microsoft.Agents.AI.OpenAI/Extensions/AIAgentWithOpenAIExtensions.cs
+++ b/dotnet/src/Microsoft.Agents.AI.OpenAI/Extensions/AIAgentWithOpenAIExtensions.cs
@@ -73,22 +73,22 @@ public static AsyncCollectionResult RunStreamingA
}
///
- /// Runs the AI agent with a collection of OpenAI response items and returns the response as a native OpenAI .
+ /// Runs the AI agent with a collection of OpenAI response items and returns the response as a native OpenAI .
///
/// The AI agent to run.
/// The collection of OpenAI response items to send to the agent.
/// The conversation thread to continue with this invocation. If not provided, creates a new thread. The thread will be mutated with the provided messages and agent response.
/// Optional parameters for agent invocation.
/// The to monitor for cancellation requests. The default is .
- /// A representing the asynchronous operation that returns a native OpenAI response.
+ /// A representing the asynchronous operation that returns a native OpenAI response.
/// Thrown when or is .
- /// Thrown when the agent's response cannot be converted to an , typically when the underlying representation is not an OpenAI response.
+ /// Thrown when the agent's response cannot be converted to an , typically when the underlying representation is not an OpenAI response.
/// Thrown when any message in has a type that is not supported by the message conversion method.
///
/// This method converts the OpenAI response items to the Microsoft Extensions AI format using the appropriate conversion method,
- /// runs the agent with the converted message collection, and then extracts the native OpenAI from the response using .
+ /// runs the agent with the converted message collection, and then extracts the native OpenAI from the response using .
///
- public static async Task RunAsync(this AIAgent agent, IEnumerable messages, AgentThread? thread = null, AgentRunOptions? options = null, CancellationToken cancellationToken = default)
+ public static async Task RunAsync(this AIAgent agent, IEnumerable messages, AgentThread? thread = null, AgentRunOptions? options = null, CancellationToken cancellationToken = default)
{
Throw.IfNull(agent);
Throw.IfNull(messages);
diff --git a/dotnet/src/Microsoft.Agents.AI.OpenAI/Extensions/AgentRunResponseExtensions.cs b/dotnet/src/Microsoft.Agents.AI.OpenAI/Extensions/AgentRunResponseExtensions.cs
index 9a164d862b..44844e64f5 100644
--- a/dotnet/src/Microsoft.Agents.AI.OpenAI/Extensions/AgentRunResponseExtensions.cs
+++ b/dotnet/src/Microsoft.Agents.AI.OpenAI/Extensions/AgentRunResponseExtensions.cs
@@ -29,17 +29,17 @@ response.RawRepresentation as ChatCompletion ??
}
///
- /// Creates or extracts a native OpenAI object from an .
+ /// Creates or extracts a native OpenAI object from an .
///
/// The agent response.
- /// The OpenAI object.
+ /// The OpenAI object.
/// is .
- public static OpenAIResponse AsOpenAIResponse(this AgentRunResponse response)
+ public static ResponseResult AsOpenAIResponse(this AgentRunResponse response)
{
Throw.IfNull(response);
return
- response.RawRepresentation as OpenAIResponse ??
- response.AsChatResponse().AsOpenAIResponse();
+ response.RawRepresentation as ResponseResult ??
+ response.AsChatResponse().AsOpenAIResponseResult();
}
}
diff --git a/dotnet/src/Microsoft.Agents.AI.OpenAI/Extensions/OpenAIResponseClientExtensions.cs b/dotnet/src/Microsoft.Agents.AI.OpenAI/Extensions/OpenAIResponseClientExtensions.cs
index 0d48147c77..224bf5db95 100644
--- a/dotnet/src/Microsoft.Agents.AI.OpenAI/Extensions/OpenAIResponseClientExtensions.cs
+++ b/dotnet/src/Microsoft.Agents.AI.OpenAI/Extensions/OpenAIResponseClientExtensions.cs
@@ -8,7 +8,7 @@
namespace OpenAI.Responses;
///
-/// Provides extension methods for
+/// Provides extension methods for
/// to simplify the creation of AI agents that work with OpenAI services.
///
///
@@ -20,9 +20,9 @@ namespace OpenAI.Responses;
public static class OpenAIResponseClientExtensions
{
///
- /// Creates an AI agent from an using the OpenAI Response API.
+ /// Creates an AI agent from an using the OpenAI Response API.
///
- /// The to use for the agent.
+ /// The to use for the agent.
/// Optional system instructions that define the agent's behavior and personality.
/// Optional name for the agent for identification purposes.
/// Optional description of the agent's capabilities and purpose.
@@ -33,7 +33,7 @@ public static class OpenAIResponseClientExtensions
/// An instance backed by the OpenAI Response service.
/// Thrown when is .
public static ChatClientAgent CreateAIAgent(
- this OpenAIResponseClient client,
+ this ResponsesClient client,
string? instructions = null,
string? name = null,
string? description = null,
@@ -61,9 +61,9 @@ public static ChatClientAgent CreateAIAgent(
}
///
- /// Creates an AI agent from an using the OpenAI Response API.
+ /// Creates an AI agent from an using the OpenAI Response API.
///
- /// The to use for the agent.
+ /// The to use for the agent.
/// Full set of options to configure the agent.
/// Provides a way to customize the creation of the underlying used by the agent.
/// Optional logger factory for enabling logging within the agent.
@@ -71,7 +71,7 @@ public static ChatClientAgent CreateAIAgent(
/// An instance backed by the OpenAI Response service.
/// Thrown when or is .
public static ChatClientAgent CreateAIAgent(
- this OpenAIResponseClient client,
+ this ResponsesClient client,
ChatClientAgentOptions options,
Func? clientFactory = null,
ILoggerFactory? loggerFactory = null,
diff --git a/dotnet/src/Microsoft.Agents.AI.Workflows.Declarative.AzureAI/AzureAgentProvider.cs b/dotnet/src/Microsoft.Agents.AI.Workflows.Declarative.AzureAI/AzureAgentProvider.cs
index c4a613901c..d4010a43c2 100644
--- a/dotnet/src/Microsoft.Agents.AI.Workflows.Declarative.AzureAI/AzureAgentProvider.cs
+++ b/dotnet/src/Microsoft.Agents.AI.Workflows.Declarative.AzureAI/AzureAgentProvider.cs
@@ -111,7 +111,7 @@ public override async IAsyncEnumerable InvokeAgentAsync(
if (inputArguments is not null)
{
JsonNode jsonNode = ConvertDictionaryToJson(inputArguments);
- ResponseCreationOptions responseCreationOptions = new();
+ CreateResponseOptions responseCreationOptions = new();
#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
responseCreationOptions.Patch.Set("$.structured_inputs"u8, BinaryData.FromString(jsonNode.ToJsonString()));
#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
@@ -206,7 +206,7 @@ private async Task GetAgentAsync(AgentVersion agentVersion, Cancellatio
public override async Task GetMessageAsync(string conversationId, string messageId, CancellationToken cancellationToken = default)
{
AgentResponseItem responseItem = await this.GetConversationClient().GetProjectConversationItemAsync(conversationId, messageId, include: null, cancellationToken).ConfigureAwait(false);
- ResponseItem[] items = [responseItem.AsOpenAIResponseItem()];
+ ResponseItem[] items = [responseItem.AsResponseResultItem()];
return items.AsChatMessages().Single();
}
@@ -223,7 +223,7 @@ public override async IAsyncEnumerable GetMessagesAsync(
await foreach (AgentResponseItem responseItem in this.GetConversationClient().GetProjectConversationItemsAsync(conversationId, null, limit, order.ToString(), after, before, include: null, cancellationToken).ConfigureAwait(false))
{
- ResponseItem[] items = [responseItem.AsOpenAIResponseItem()];
+ ResponseItem[] items = [responseItem.AsResponseResultItem()];
foreach (ChatMessage message in items.AsChatMessages())
{
yield return message;
diff --git a/dotnet/tests/AzureAI.IntegrationTests/AIProjectClientFixture.cs b/dotnet/tests/AzureAI.IntegrationTests/AIProjectClientFixture.cs
index e982c8081f..883b317f5e 100644
--- a/dotnet/tests/AzureAI.IntegrationTests/AIProjectClientFixture.cs
+++ b/dotnet/tests/AzureAI.IntegrationTests/AIProjectClientFixture.cs
@@ -89,7 +89,7 @@ private async Task> GetChatHistoryFromConversationAsync(string
List messages = [];
await foreach (AgentResponseItem item in this._client.GetProjectOpenAIClient().GetProjectConversationsClient().GetProjectConversationItemsAsync(conversationId, order: "asc"))
{
- var openAIItem = item.AsOpenAIResponseItem();
+ var openAIItem = item.AsResponseResultItem();
if (openAIItem is MessageResponseItem messageItem)
{
messages.Add(new ChatMessage
diff --git a/dotnet/tests/Microsoft.Agents.AI.Hosting.OpenAI.UnitTests/OpenAIResponsesIntegrationTests.cs b/dotnet/tests/Microsoft.Agents.AI.Hosting.OpenAI.UnitTests/OpenAIResponsesIntegrationTests.cs
index abf66a732f..2dd5b85e5f 100644
--- a/dotnet/tests/Microsoft.Agents.AI.Hosting.OpenAI.UnitTests/OpenAIResponsesIntegrationTests.cs
+++ b/dotnet/tests/Microsoft.Agents.AI.Hosting.OpenAI.UnitTests/OpenAIResponsesIntegrationTests.cs
@@ -49,7 +49,7 @@ public async Task CreateResponseStreaming_WithSimpleMessage_ReturnsStreamingUpda
const string ExpectedResponse = "One Two Three";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Count to 3");
@@ -90,10 +90,10 @@ public async Task CreateResponse_WithSimpleMessage_ReturnsCompleteResponseAsync(
const string ExpectedResponse = "Hello! How can I help you today?";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
- OpenAIResponse response = await responseClient.CreateResponseAsync("Hello");
+ ResponseResult response = await responseClient.CreateResponseAsync("Hello");
// Assert
Assert.NotNull(response);
@@ -117,7 +117,7 @@ public async Task CreateResponseStreaming_WithMultipleChunks_StreamsAllContentAs
const string ExpectedResponse = "This is a test response with multiple words";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Test");
@@ -162,12 +162,12 @@ public async Task CreateResponse_WithMultipleAgents_EachAgentRespondsCorrectlyAs
(Agent1Name, Agent1Instructions, Agent1Response),
(Agent2Name, Agent2Instructions, Agent2Response));
- OpenAIResponseClient responseClient1 = this.CreateResponseClient(Agent1Name);
- OpenAIResponseClient responseClient2 = this.CreateResponseClient(Agent2Name);
+ ResponsesClient responseClient1 = this.CreateResponseClient(Agent1Name);
+ ResponsesClient responseClient2 = this.CreateResponseClient(Agent2Name);
// Act
- OpenAIResponse response1 = await responseClient1.CreateResponseAsync("Hello");
- OpenAIResponse response2 = await responseClient2.CreateResponseAsync("Hello");
+ ResponseResult response1 = await responseClient1.CreateResponseAsync("Hello");
+ ResponseResult response2 = await responseClient2.CreateResponseAsync("Hello");
// Assert
string content1 = response1.GetOutputText();
@@ -190,10 +190,10 @@ public async Task CreateResponse_SameAgentStreamingAndNonStreaming_BothWorkCorre
const string ExpectedResponse = "This is the response";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act - Non-streaming
- OpenAIResponse nonStreamingResponse = await responseClient.CreateResponseAsync("Test");
+ ResponseResult nonStreamingResponse = await responseClient.CreateResponseAsync("Test");
// Act - Streaming
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Test");
@@ -224,10 +224,10 @@ public async Task CreateResponse_CompletedResponse_HasCorrectStatusAsync()
const string ExpectedResponse = "Complete";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
- OpenAIResponse response = await responseClient.CreateResponseAsync("Test");
+ ResponseResult response = await responseClient.CreateResponseAsync("Test");
// Assert
Assert.Equal(ResponseStatus.Completed, response.Status);
@@ -247,7 +247,7 @@ public async Task CreateResponseStreaming_VerifyEventSequence_ContainsExpectedEv
const string ExpectedResponse = "Test response with multiple words";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Test");
@@ -286,7 +286,7 @@ public async Task CreateResponseStreaming_EmptyResponse_HandlesGracefullyAsync()
const string ExpectedResponse = "";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Test");
@@ -316,10 +316,10 @@ public async Task CreateResponse_IncludesMetadata_HasRequiredFieldsAsync()
const string ExpectedResponse = "Response with metadata";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
- OpenAIResponse response = await responseClient.CreateResponseAsync("Test");
+ ResponseResult response = await responseClient.CreateResponseAsync("Test");
// Assert
Assert.NotNull(response.Id);
@@ -340,7 +340,7 @@ public async Task CreateResponseStreaming_LongText_StreamsAllContentAsync()
string expectedResponse = string.Join(" ", Enumerable.Range(1, 100).Select(i => $"Word{i}"));
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, expectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Generate long text");
@@ -371,7 +371,7 @@ public async Task CreateResponseStreaming_OutputIndices_AreConsistentAsync()
const string ExpectedResponse = "Test output index";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Test");
@@ -407,7 +407,7 @@ public async Task CreateResponseStreaming_SingleWord_StreamsCorrectlyAsync()
const string ExpectedResponse = "Hello";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Test");
@@ -437,7 +437,7 @@ public async Task CreateResponseStreaming_SpecialCharacters_PreservesFormattingA
const string ExpectedResponse = "Hello! How are you? I'm fine. 100% great!";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Test");
@@ -467,10 +467,10 @@ public async Task CreateResponse_SpecialCharacters_PreservesContentAsync()
const string ExpectedResponse = "Symbols: @#$%^&*() Quotes: \"Hello\" 'World' Unicode: 你好 🌍";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
- OpenAIResponse response = await responseClient.CreateResponseAsync("Test");
+ ResponseResult response = await responseClient.CreateResponseAsync("Test");
// Assert
string content = response.GetOutputText();
@@ -489,7 +489,7 @@ public async Task CreateResponseStreaming_ItemIds_AreConsistentAsync()
const string ExpectedResponse = "Testing item IDs";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Test");
@@ -525,12 +525,12 @@ public async Task CreateResponse_MultipleSequentialRequests_AllSucceedAsync()
const string ExpectedResponse = "Response";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act & Assert - Make 5 sequential requests
for (int i = 0; i < 5; i++)
{
- OpenAIResponse response = await responseClient.CreateResponseAsync($"Request {i}");
+ ResponseResult response = await responseClient.CreateResponseAsync($"Request {i}");
Assert.NotNull(response);
Assert.Equal(ResponseStatus.Completed, response.Status);
Assert.Equal(ExpectedResponse, response.GetOutputText());
@@ -549,7 +549,7 @@ public async Task CreateResponseStreaming_MultipleSequentialRequests_AllStreamCo
const string ExpectedResponse = "Streaming response";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act & Assert - Make 3 sequential streaming requests
for (int i = 0; i < 3; i++)
@@ -581,13 +581,13 @@ public async Task CreateResponse_MultipleRequests_GenerateUniqueIdsAsync()
const string ExpectedResponse = "Response";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
List responseIds = [];
for (int i = 0; i < 10; i++)
{
- OpenAIResponse response = await responseClient.CreateResponseAsync($"Request {i}");
+ ResponseResult response = await responseClient.CreateResponseAsync($"Request {i}");
responseIds.Add(response.Id);
}
@@ -608,7 +608,7 @@ public async Task CreateResponseStreaming_SequenceNumbers_AreMonotonicallyIncrea
const string ExpectedResponse = "Test sequence numbers with multiple words";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Test");
@@ -641,10 +641,10 @@ public async Task CreateResponse_ModelInformation_IsCorrectAsync()
const string ExpectedResponse = "Test model info";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
- OpenAIResponse response = await responseClient.CreateResponseAsync("Test");
+ ResponseResult response = await responseClient.CreateResponseAsync("Test");
// Assert
Assert.NotNull(response.Model);
@@ -663,7 +663,7 @@ public async Task CreateResponseStreaming_Punctuation_PreservesContentAsync()
const string ExpectedResponse = "Hello, world! How are you today? I'm doing well.";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Test");
@@ -693,10 +693,10 @@ public async Task CreateResponse_ShortInput_ReturnsValidResponseAsync()
const string ExpectedResponse = "OK";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
- OpenAIResponse response = await responseClient.CreateResponseAsync("Hi");
+ ResponseResult response = await responseClient.CreateResponseAsync("Hi");
// Assert
Assert.NotNull(response);
@@ -716,7 +716,7 @@ public async Task CreateResponseStreaming_ContentIndices_AreConsistentAsync()
const string ExpectedResponse = "Test content indices";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Test");
@@ -748,10 +748,10 @@ public async Task CreateResponse_Newlines_PreservesFormattingAsync()
const string ExpectedResponse = "Line 1\nLine 2\nLine 3";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
- OpenAIResponse response = await responseClient.CreateResponseAsync("Test");
+ ResponseResult response = await responseClient.CreateResponseAsync("Test");
// Assert
string content = response.GetOutputText();
@@ -771,7 +771,7 @@ public async Task CreateResponseStreaming_Newlines_PreservesFormattingAsync()
const string ExpectedResponse = "First line\nSecond line\nThird line";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Test");
@@ -807,10 +807,10 @@ public async Task CreateResponse_ImageContent_ReturnsCorrectlyAsync()
instructions: Instructions,
chatClient: new TestHelpers.ImageContentMockChatClient(ImageUrl));
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
- OpenAIResponse response = await responseClient.CreateResponseAsync("Show me an image");
+ ResponseResult response = await responseClient.CreateResponseAsync("Show me an image");
// Assert
Assert.NotNull(response);
@@ -834,7 +834,7 @@ public async Task CreateResponseStreaming_ImageContent_StreamsCorrectlyAsync()
instructions: Instructions,
chatClient: new TestHelpers.ImageContentMockChatClient(ImageUrl));
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Show me an image");
@@ -868,10 +868,10 @@ public async Task CreateResponse_AudioContent_ReturnsCorrectlyAsync()
instructions: Instructions,
chatClient: new TestHelpers.AudioContentMockChatClient(AudioData, Transcript));
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
- OpenAIResponse response = await responseClient.CreateResponseAsync("Generate audio");
+ ResponseResult response = await responseClient.CreateResponseAsync("Generate audio");
// Assert
Assert.NotNull(response);
@@ -896,7 +896,7 @@ public async Task CreateResponseStreaming_AudioContent_StreamsCorrectlyAsync()
instructions: Instructions,
chatClient: new TestHelpers.AudioContentMockChatClient(AudioData, Transcript));
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Generate audio");
@@ -930,10 +930,10 @@ public async Task CreateResponse_FunctionCall_ReturnsCorrectlyAsync()
instructions: Instructions,
chatClient: new TestHelpers.FunctionCallMockChatClient(FunctionName, Arguments));
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
- OpenAIResponse response = await responseClient.CreateResponseAsync("What's the weather?");
+ ResponseResult response = await responseClient.CreateResponseAsync("What's the weather?");
// Assert
Assert.NotNull(response);
@@ -957,7 +957,7 @@ public async Task CreateResponseStreaming_FunctionCall_StreamsCorrectlyAsync()
instructions: Instructions,
chatClient: new TestHelpers.FunctionCallMockChatClient(FunctionName, Arguments));
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Calculate 2+2");
@@ -988,10 +988,10 @@ public async Task CreateResponse_MixedContent_ReturnsCorrectlyAsync()
instructions: Instructions,
chatClient: new TestHelpers.MixedContentMockChatClient());
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
- OpenAIResponse response = await responseClient.CreateResponseAsync("Show me various content");
+ ResponseResult response = await responseClient.CreateResponseAsync("Show me various content");
// Assert
Assert.NotNull(response);
@@ -1014,7 +1014,7 @@ public async Task CreateResponseStreaming_MixedContent_StreamsCorrectlyAsync()
instructions: Instructions,
chatClient: new TestHelpers.MixedContentMockChatClient());
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Show me various content");
@@ -1047,7 +1047,7 @@ public async Task CreateResponseStreaming_TextDone_IncludesDoneEventAsync()
const string ExpectedResponse = "Complete text response";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Test");
@@ -1075,7 +1075,7 @@ public async Task CreateResponseStreaming_ContentPartAdded_IncludesEventAsync()
const string ExpectedResponse = "Response with content parts";
this._httpClient = await this.CreateTestServerAsync(AgentName, Instructions, ExpectedResponse);
- OpenAIResponseClient responseClient = this.CreateResponseClient(AgentName);
+ ResponsesClient responseClient = this.CreateResponseClient(AgentName);
// Act
AsyncCollectionResult streamingResult = responseClient.CreateResponseStreamingAsync("Test");
@@ -1122,7 +1122,7 @@ public async Task CreateResponse_WithConversationId_DoesNotForwardConversationId
string conversationId = convDoc.RootElement.GetProperty("id").GetString()!;
// Act - Send request with conversation ID using raw HTTP
- // (OpenAI SDK doesn't expose ConversationId directly on ResponseCreationOptions)
+ // (OpenAI SDK doesn't expose ConversationId directly on CreateResponseOptions)
var requestBody = new
{
input = "Test",
@@ -1201,9 +1201,9 @@ public async Task CreateResponseStreaming_WithConversationId_DoesNotForwardConve
Assert.Null(mockChatClient.LastChatOptions.ConversationId);
}
- private OpenAIResponseClient CreateResponseClient(string agentName)
+ private ResponsesClient CreateResponseClient(string agentName)
{
- return new OpenAIResponseClient(
+ return new ResponsesClient(
model: "test-model",
credential: new ApiKeyCredential("test-api-key"),
options: new OpenAIClientOptions
diff --git a/dotnet/tests/Microsoft.Agents.AI.OpenAI.UnitTests/Extensions/OpenAIResponseClientExtensionsTests.cs b/dotnet/tests/Microsoft.Agents.AI.OpenAI.UnitTests/Extensions/OpenAIResponseClientExtensionsTests.cs
index 781ccb123e..127fe1a58f 100644
--- a/dotnet/tests/Microsoft.Agents.AI.OpenAI.UnitTests/Extensions/OpenAIResponseClientExtensionsTests.cs
+++ b/dotnet/tests/Microsoft.Agents.AI.OpenAI.UnitTests/Extensions/OpenAIResponseClientExtensionsTests.cs
@@ -55,9 +55,9 @@ public async IAsyncEnumerable GetStreamingResponseAsync(
}
///
- /// Creates a test OpenAIResponseClient implementation for testing.
+ /// Creates a test ResponsesClient implementation for testing.
///
- private sealed class TestOpenAIResponseClient : OpenAIResponseClient
+ private sealed class TestOpenAIResponseClient : ResponsesClient
{
public TestOpenAIResponseClient()
{
@@ -147,7 +147,7 @@ public void CreateAIAgent_WithNullClient_ThrowsArgumentNullException()
{
// Act & Assert
var exception = Assert.Throws(() =>
- ((OpenAIResponseClient)null!).CreateAIAgent());
+ ((ResponsesClient)null!).CreateAIAgent());
Assert.Equal("client", exception.ParamName);
}
diff --git a/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseChatClientAgentRunStreamingTests.cs b/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseChatClientAgentRunStreamingTests.cs
index 669a4dd2a0..80a148d7fc 100644
--- a/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseChatClientAgentRunStreamingTests.cs
+++ b/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseChatClientAgentRunStreamingTests.cs
@@ -3,11 +3,11 @@
using System.Threading.Tasks;
using AgentConformance.IntegrationTests;
-namespace OpenAIResponse.IntegrationTests;
+namespace ResponseResult.IntegrationTests;
public class OpenAIResponseStoreTrueChatClientAgentRunStreamingTests() : ChatClientAgentRunStreamingTests(() => new(store: true))
{
- private const string SkipReason = "OpenAIResponse does not support empty messages";
+ private const string SkipReason = "ResponseResult does not support empty messages";
[Fact(Skip = SkipReason)]
public override Task RunWithInstructionsAndNoMessageReturnsExpectedResultAsync() =>
@@ -16,7 +16,7 @@ public override Task RunWithInstructionsAndNoMessageReturnsExpectedResultAsync()
public class OpenAIResponseStoreFalseChatClientAgentRunStreamingTests() : ChatClientAgentRunStreamingTests(() => new(store: false))
{
- private const string SkipReason = "OpenAIResponse does not support empty messages";
+ private const string SkipReason = "ResponseResult does not support empty messages";
[Fact(Skip = SkipReason)]
public override Task RunWithInstructionsAndNoMessageReturnsExpectedResultAsync() =>
diff --git a/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseChatClientAgentRunTests.cs b/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseChatClientAgentRunTests.cs
index af2f1c14ec..8b742e2964 100644
--- a/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseChatClientAgentRunTests.cs
+++ b/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseChatClientAgentRunTests.cs
@@ -3,11 +3,11 @@
using System.Threading.Tasks;
using AgentConformance.IntegrationTests;
-namespace OpenAIResponse.IntegrationTests;
+namespace ResponseResult.IntegrationTests;
public class OpenAIResponseStoreTrueChatClientAgentRunTests() : ChatClientAgentRunTests(() => new(store: true))
{
- private const string SkipReason = "OpenAIResponse does not support empty messages";
+ private const string SkipReason = "ResponseResult does not support empty messages";
[Fact(Skip = SkipReason)]
public override Task RunWithInstructionsAndNoMessageReturnsExpectedResultAsync() =>
@@ -16,7 +16,7 @@ public override Task RunWithInstructionsAndNoMessageReturnsExpectedResultAsync()
public class OpenAIResponseStoreFalseChatClientAgentRunTests() : ChatClientAgentRunTests(() => new(store: false))
{
- private const string SkipReason = "OpenAIResponse does not support empty messages";
+ private const string SkipReason = "ResponseResult does not support empty messages";
[Fact(Skip = SkipReason)]
public override Task RunWithInstructionsAndNoMessageReturnsExpectedResultAsync() =>
diff --git a/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseFixture.cs b/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseFixture.cs
index a58583fbca..c6c84db569 100644
--- a/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseFixture.cs
+++ b/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseFixture.cs
@@ -12,13 +12,13 @@
using OpenAI.Responses;
using Shared.IntegrationTests;
-namespace OpenAIResponse.IntegrationTests;
+namespace ResponseResult.IntegrationTests;
public class OpenAIResponseFixture(bool store) : IChatClientAgentFixture
{
private static readonly OpenAIConfiguration s_config = TestConfiguration.LoadSection();
- private OpenAIResponseClient _openAIResponseClient = null!;
+ private ResponsesClient _openAIResponseClient = null!;
private ChatClientAgent _agent = null!;
public AIAgent Agent => this._agent;
@@ -77,7 +77,7 @@ public async Task CreateChatClientAgentAsync(
{
Instructions = instructions,
Tools = aiTools,
- RawRepresentationFactory = new Func(_ => new ResponseCreationOptions() { StoredOutputEnabled = store })
+ RawRepresentationFactory = new Func(_ => new CreateResponseOptions() { StoredOutputEnabled = store })
},
});
@@ -92,7 +92,7 @@ public Task DeleteThreadAsync(AgentThread thread) =>
public async Task InitializeAsync()
{
this._openAIResponseClient = new OpenAIClient(s_config.ApiKey)
- .GetOpenAIResponseClient(s_config.ChatModelId);
+ .GetResponsesClient(s_config.ChatModelId);
this._agent = await this.CreateChatClientAgentAsync();
}
diff --git a/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseRunStreamingTests.cs b/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseRunStreamingTests.cs
index e2e7e28bbd..c12f8f2db5 100644
--- a/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseRunStreamingTests.cs
+++ b/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseRunStreamingTests.cs
@@ -3,11 +3,11 @@
using System.Threading.Tasks;
using AgentConformance.IntegrationTests;
-namespace OpenAIResponse.IntegrationTests;
+namespace ResponseResult.IntegrationTests;
public class OpenAIResponseStoreTrueRunStreamingTests() : RunStreamingTests(() => new(store: true))
{
- private const string SkipReason = "OpenAIResponse does not support empty messages";
+ private const string SkipReason = "ResponseResult does not support empty messages";
[Fact(Skip = SkipReason)]
public override Task RunWithNoMessageDoesNotFailAsync() =>
Task.CompletedTask;
@@ -15,7 +15,7 @@ public override Task RunWithNoMessageDoesNotFailAsync() =>
public class OpenAIResponseStoreFalseRunStreamingTests() : RunStreamingTests(() => new(store: false))
{
- private const string SkipReason = "OpenAIResponse does not support empty messages";
+ private const string SkipReason = "ResponseResult does not support empty messages";
[Fact(Skip = SkipReason)]
public override Task RunWithNoMessageDoesNotFailAsync() =>
diff --git a/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseRunTests.cs b/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseRunTests.cs
index 41c5254474..423ac583c7 100644
--- a/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseRunTests.cs
+++ b/dotnet/tests/OpenAIResponse.IntegrationTests/OpenAIResponseRunTests.cs
@@ -3,11 +3,11 @@
using System.Threading.Tasks;
using AgentConformance.IntegrationTests;
-namespace OpenAIResponse.IntegrationTests;
+namespace ResponseResult.IntegrationTests;
public class OpenAIResponseStoreTrueRunTests() : RunTests(() => new(store: true))
{
- private const string SkipReason = "OpenAIResponse does not support empty messages";
+ private const string SkipReason = "ResponseResult does not support empty messages";
[Fact(Skip = SkipReason)]
public override Task RunWithNoMessageDoesNotFailAsync() =>
Task.CompletedTask;
@@ -15,7 +15,7 @@ public override Task RunWithNoMessageDoesNotFailAsync() =>
public class OpenAIResponseStoreFalseRunTests() : RunTests(() => new(store: false))
{
- private const string SkipReason = "OpenAIResponse does not support empty messages";
+ private const string SkipReason = "ResponseResult does not support empty messages";
[Fact(Skip = SkipReason)]
public override Task RunWithNoMessageDoesNotFailAsync() =>