Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/upgrades/prompts/SemanticKernelToAgentFramework.md
Original file line number Diff line number Diff line change
Expand Up @@ -839,7 +839,7 @@ var agentOptions = new ChatClientAgentRunOptions(new ChatOptions
{
MaxOutputTokens = 8000,
// Breaking glass to access provider-specific options
RawRepresentationFactory = (_) => new OpenAI.Responses.ResponseCreationOptions()
RawRepresentationFactory = (_) => new OpenAI.Responses.CreateResponseOptions()
{
ReasoningOptions = new()
{
Expand Down
1 change: 1 addition & 0 deletions dotnet/.editorconfig
Original file line number Diff line number Diff line change
Expand Up @@ -209,6 +209,7 @@ dotnet_diagnostic.CA2000.severity = none # Call System.IDisposable.Dispose on ob
dotnet_diagnostic.CA2225.severity = none # Operator overloads have named alternates
dotnet_diagnostic.CA2227.severity = none # Change to be read-only by removing the property setter
dotnet_diagnostic.CA2249.severity = suggestion # Consider using 'Contains' method instead of 'IndexOf' method
dotnet_diagnostic.CA2252.severity = none # Requires preview
dotnet_diagnostic.CA2253.severity = none # Named placeholders in the logging message template should not be comprised of only numeric characters
dotnet_diagnostic.CA2263.severity = suggestion # Use generic overload
Expand Down
16 changes: 7 additions & 9 deletions dotnet/Directory.Packages.props
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,10 @@
<PackageVersion Include="Aspire.Microsoft.Azure.Cosmos" Version="$(AspireAppHostSdkVersion)" />
<PackageVersion Include="CommunityToolkit.Aspire.OllamaSharp" Version="13.0.0-beta.440" />
<!-- Azure.* -->
<PackageVersion Include="Azure.AI.Projects" Version="1.2.0-beta.3" />
<PackageVersion Include="Azure.AI.Projects.OpenAI" Version="1.0.0-beta.4" />
<PackageVersion Include="Azure.AI.Projects" Version="1.2.0-beta.5" />
<PackageVersion Include="Azure.AI.Projects.OpenAI" Version="1.0.0-beta.5" />
<PackageVersion Include="Azure.AI.Agents.Persistent" Version="1.2.0-beta.8" />
<PackageVersion Include="Azure.AI.OpenAI" Version="2.7.0-beta.2" />
<PackageVersion Include="Azure.AI.OpenAI" Version="2.8.0-beta.1" />
<PackageVersion Include="Azure.Identity" Version="1.17.1" />
<PackageVersion Include="Azure.Monitor.OpenTelemetry.Exporter" Version="1.4.0" />
<!-- Google Gemini -->
Expand Down Expand Up @@ -61,10 +61,9 @@
<PackageVersion Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0" />
<PackageVersion Include="Swashbuckle.AspNetCore.SwaggerUI" Version="10.0.0" />
<!-- Microsoft.Extensions.* -->
<PackageVersion Include="Microsoft.Extensions.AI" Version="10.1.0" />
<PackageVersion Include="Microsoft.Extensions.AI.Abstractions" Version="10.1.0" />
<PackageVersion Include="Microsoft.Extensions.AI.AzureAIInference" Version="10.0.0-preview.1.25559.3" />
<PackageVersion Include="Microsoft.Extensions.AI.OpenAI" Version="10.1.0-preview.1.25608.1" />
<PackageVersion Include="Microsoft.Extensions.AI" Version="10.1.1" />
<PackageVersion Include="Microsoft.Extensions.AI.Abstractions" Version="10.1.1" />
<PackageVersion Include="Microsoft.Extensions.AI.OpenAI" Version="10.1.1-preview.1.25612.2" />
<PackageVersion Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0" />
<PackageVersion Include="Microsoft.Extensions.Configuration" Version="10.0.0" />
<PackageVersion Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0" />
Expand Down Expand Up @@ -101,11 +100,10 @@
<!-- MCP -->
<PackageVersion Include="ModelContextProtocol" Version="0.4.0-preview.3" />
<!-- Inference SDKs -->
<PackageVersion Include="Anthropic.SDK" Version="5.8.0" />
<PackageVersion Include="AWSSDK.Extensions.Bedrock.MEAI" Version="4.0.4.11" />
<PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI" Version="0.10.0" />
<PackageVersion Include="OllamaSharp" Version="5.4.8" />
<PackageVersion Include="OpenAI" Version="2.7.0" />
<PackageVersion Include="OpenAI" Version="2.8.0" />
<!-- Identity -->
<PackageVersion Include="Microsoft.Identity.Client.Extensions.Msal" Version="4.78.0" />
<!-- Workflows -->
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
<PackageReference Include="CommunityToolkit.Aspire.OllamaSharp" />
<PackageReference Include="Microsoft.Extensions.AI" />
<PackageReference Include="Microsoft.Extensions.AI.Abstractions" />
<PackageReference Include="Microsoft.Extensions.AI.AzureAIInference" />
<PackageReference Include="Microsoft.Extensions.AI.OpenAI" />
<PackageReference Include="Microsoft.AspNetCore.OpenAPI" />
<PackageReference Include="Swashbuckle.AspNetCore.SwaggerUI" />
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.

using AgentWebChat.AgentHost.Utilities;
using Azure;
using Azure.AI.Inference;
using Microsoft.Extensions.AI;
using OllamaSharp;

Expand All @@ -24,7 +22,6 @@ public static ChatClientBuilder AddChatClient(this IHostApplicationBuilder build
ClientChatProvider.Ollama => builder.AddOllamaClient(connectionName, connectionInfo),
ClientChatProvider.OpenAI => builder.AddOpenAIClient(connectionName, connectionInfo),
ClientChatProvider.AzureOpenAI => builder.AddAzureOpenAIClient(connectionName).AddChatClient(connectionInfo.SelectedModel),
ClientChatProvider.AzureAIInference => builder.AddAzureInferenceClient(connectionName, connectionInfo),
_ => throw new NotSupportedException($"Unsupported provider: {connectionInfo.Provider}")
};

Expand All @@ -44,16 +41,6 @@ private static ChatClientBuilder AddOpenAIClient(this IHostApplicationBuilder bu
})
.AddChatClient(connectionInfo.SelectedModel);

private static ChatClientBuilder AddAzureInferenceClient(this IHostApplicationBuilder builder, string connectionName, ChatClientConnectionInfo connectionInfo) =>
builder.Services.AddChatClient(sp =>
{
var credential = new AzureKeyCredential(connectionInfo.AccessKey!);

var client = new ChatCompletionsClient(connectionInfo.Endpoint, credential, new AzureAIInferenceClientOptions());

return client.AsIChatClient(connectionInfo.SelectedModel);
});

private static ChatClientBuilder AddOllamaClient(this IHostApplicationBuilder builder, string connectionName, ChatClientConnectionInfo connectionInfo)
{
var httpKey = $"{connectionName}_http";
Expand Down Expand Up @@ -83,7 +70,6 @@ public static ChatClientBuilder AddKeyedChatClient(this IHostApplicationBuilder
ClientChatProvider.Ollama => builder.AddKeyedOllamaClient(connectionName, connectionInfo),
ClientChatProvider.OpenAI => builder.AddKeyedOpenAIClient(connectionName, connectionInfo),
ClientChatProvider.AzureOpenAI => builder.AddKeyedAzureOpenAIClient(connectionName).AddKeyedChatClient(connectionName, connectionInfo.SelectedModel),
ClientChatProvider.AzureAIInference => builder.AddKeyedAzureInferenceClient(connectionName, connectionInfo),
_ => throw new NotSupportedException($"Unsupported provider: {connectionInfo.Provider}")
};

Expand All @@ -103,16 +89,6 @@ private static ChatClientBuilder AddKeyedOpenAIClient(this IHostApplicationBuild
})
.AddKeyedChatClient(connectionName, connectionInfo.SelectedModel);

private static ChatClientBuilder AddKeyedAzureInferenceClient(this IHostApplicationBuilder builder, string connectionName, ChatClientConnectionInfo connectionInfo) =>
builder.Services.AddKeyedChatClient(connectionName, sp =>
{
var credential = new AzureKeyCredential(connectionInfo.AccessKey!);

var client = new ChatCompletionsClient(connectionInfo.Endpoint, credential, new AzureAIInferenceClientOptions());

return client.AsIChatClient(connectionInfo.SelectedModel);
});

private static ChatClientBuilder AddKeyedOllamaClient(this IHostApplicationBuilder builder, string connectionName, ChatClientConnectionInfo connectionInfo)
{
var httpKey = $"{connectionName}_http";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ public override async IAsyncEnumerable<AgentRunResponseUpdate> RunStreamingAsync
Transport = new HttpClientPipelineTransport(httpClient)
};

var openAiClient = new OpenAIResponseClient(model: agentName, credential: new ApiKeyCredential("dummy-key"), options: options).AsIChatClient();
var openAiClient = new ResponsesClient(model: agentName, credential: new ApiKeyCredential("dummy-key"), options: options).AsIChatClient();
var chatOptions = new ChatOptions()
{
ConversationId = threadId
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
AIAgent agent = new AzureOpenAIClient(
new Uri(endpoint),
new AzureCliCredential())
.GetOpenAIResponseClient(deploymentName)
.GetResponsesClient(deploymentName)
.CreateAIAgent(instructions: "You are good at telling jokes.", name: "Joker");

// Invoke the agent and output the text result.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@

AIAgent agent = new OpenAIClient(
apiKey)
.GetOpenAIResponseClient(model)
.GetResponsesClient(model)
.CreateAIAgent(instructions: "You are good at telling jokes.", name: "Joker");

// Invoke the agent and output the text result.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,11 @@
var model = Environment.GetEnvironmentVariable("OPENAI_MODEL") ?? "gpt-5";

var client = new OpenAIClient(apiKey)
.GetOpenAIResponseClient(model)
.GetResponsesClient(model)
.AsIChatClient().AsBuilder()
.ConfigureOptions(o =>
{
o.RawRepresentationFactory = _ => new ResponseCreationOptions()
o.RawRepresentationFactory = _ => new CreateResponseOptions()
{
ReasoningOptions = new()
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,13 +16,13 @@ public class OpenAIResponseClientAgent : DelegatingAIAgent
/// <summary>
/// Initialize an instance of <see cref="OpenAIResponseClientAgent"/>.
/// </summary>
/// <param name="client">Instance of <see cref="OpenAIResponseClient"/></param>
/// <param name="client">Instance of <see cref="ResponsesClient"/></param>
/// <param name="instructions">Optional instructions for the agent.</param>
/// <param name="name">Optional name for the agent.</param>
/// <param name="description">Optional description for the agent.</param>
/// <param name="loggerFactory">Optional instance of <see cref="ILoggerFactory"/></param>
public OpenAIResponseClientAgent(
OpenAIResponseClient client,
ResponsesClient client,
string? instructions = null,
string? name = null,
string? description = null,
Expand All @@ -39,11 +39,11 @@ public OpenAIResponseClientAgent(
/// <summary>
/// Initialize an instance of <see cref="OpenAIResponseClientAgent"/>.
/// </summary>
/// <param name="client">Instance of <see cref="OpenAIResponseClient"/></param>
/// <param name="client">Instance of <see cref="ResponsesClient"/></param>
/// <param name="options">Options to create the agent.</param>
/// <param name="loggerFactory">Optional instance of <see cref="ILoggerFactory"/></param>
public OpenAIResponseClientAgent(
OpenAIResponseClient client, ChatClientAgentOptions options, ILoggerFactory? loggerFactory = null) :
ResponsesClient client, ChatClientAgentOptions options, ILoggerFactory? loggerFactory = null) :
base(new ChatClientAgent((client ?? throw new ArgumentNullException(nameof(client))).AsIChatClient(), options, loggerFactory))
{
}
Expand All @@ -55,8 +55,8 @@ public OpenAIResponseClientAgent(
/// <param name="thread">The conversation thread to continue with this invocation. If not provided, creates a new thread. The thread will be mutated with the provided messages and agent response.</param>
/// <param name="options">Optional parameters for agent invocation.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
/// <returns>A <see cref="OpenAIResponse"/> containing the list of <see cref="ChatMessage"/> items.</returns>
public virtual async Task<OpenAIResponse> RunAsync(
/// <returns>A <see cref="ResponseResult"/> containing the list of <see cref="ChatMessage"/> items.</returns>
public virtual async Task<ResponseResult> RunAsync(
IEnumerable<ResponseItem> messages,
AgentThread? thread = null,
AgentRunOptions? options = null,
Expand All @@ -74,7 +74,7 @@ public virtual async Task<OpenAIResponse> RunAsync(
/// <param name="thread">The conversation thread to continue with this invocation. If not provided, creates a new thread. The thread will be mutated with the provided messages and agent response.</param>
/// <param name="options">Optional parameters for agent invocation.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> to monitor for cancellation requests. The default is <see cref="CancellationToken.None"/>.</param>
/// <returns>A <see cref="OpenAIResponse"/> containing the list of <see cref="ChatMessage"/> items.</returns>
/// <returns>A <see cref="ResponseResult"/> containing the list of <see cref="ChatMessage"/> items.</returns>
public virtual async IAsyncEnumerable<StreamingResponseUpdate> RunStreamingAsync(
IEnumerable<ResponseItem> messages,
AgentThread? thread = null,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.

// This sample demonstrates how to create OpenAIResponseClientAgent directly from an OpenAIResponseClient instance.
// This sample demonstrates how to create OpenAIResponseClientAgent directly from a ResponsesClient instance.

using OpenAI;
using OpenAI.Responses;
Expand All @@ -9,16 +9,16 @@
var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new InvalidOperationException("OPENAI_API_KEY is not set.");
var model = Environment.GetEnvironmentVariable("OPENAI_MODEL") ?? "gpt-4o-mini";

// Create an OpenAIResponseClient directly from OpenAIClient
OpenAIResponseClient responseClient = new OpenAIClient(apiKey).GetOpenAIResponseClient(model);
// Create a ResponsesClient directly from OpenAIClient
ResponsesClient responseClient = new OpenAIClient(apiKey).GetResponsesClient(model);

// Create an agent directly from the OpenAIResponseClient using OpenAIResponseClientAgent
// Create an agent directly from the ResponsesClient using OpenAIResponseClientAgent
OpenAIResponseClientAgent agent = new(responseClient, instructions: "You are good at telling jokes.", name: "Joker");

ResponseItem userMessage = ResponseItem.CreateUserMessageItem("Tell me a joke about a pirate.");

// Invoke the agent and output the text result.
OpenAIResponse response = await agent.RunAsync([userMessage]);
ResponseResult response = await agent.RunAsync([userMessage]);
Console.WriteLine(response.GetOutputText());

// Invoke the agent with streaming support.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@
OpenAIClient openAIClient = new(apiKey);
ConversationClient conversationClient = openAIClient.GetConversationClient();

// Create an agent directly from the OpenAIResponseClient using OpenAIResponseClientAgent
ChatClientAgent agent = new(openAIClient.GetOpenAIResponseClient(model).AsIChatClient(), instructions: "You are a helpful assistant.", name: "ConversationAgent");
// Create a ChatClientAgent directly from the ResponsesClient obtained via OpenAIClient
ChatClientAgent agent = new(openAIClient.GetResponsesClient(model).AsIChatClient(), instructions: "You are a helpful assistant.", name: "ConversationAgent");

ClientResult createConversationResult = await conversationClient.CreateConversationAsync(BinaryContent.Create(BinaryData.FromString("{}")));

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
AIAgent agent = new AzureOpenAIClient(
new Uri(endpoint),
new AzureCliCredential())
.GetOpenAIResponseClient(deploymentName)
.GetResponsesClient(deploymentName)
.CreateAIAgent(
name: "SpaceNovelWriter",
instructions: "You are a space novel writer. Always research relevant facts and generate character profiles for the main characters before writing novels." +
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
AIAgent agent = new AzureOpenAIClient(
new Uri(endpoint),
new AzureCliCredential())
.GetOpenAIResponseClient(deploymentName)
.GetResponsesClient(deploymentName)
.CreateAIAgent();

// Enable background responses (only supported by OpenAI Responses at this time).
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ private static async Task InvokeComputerUseAgentAsync(AIAgent agent)
Dictionary<string, byte[]> screenshots = ComputerUseUtil.LoadScreenshotAssets();

ChatOptions chatOptions = new();
ResponseCreationOptions responseCreationOptions = new()
CreateResponseOptions responseCreationOptions = new()
{
TruncationMode = ResponseTruncationMode.Auto
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
AIAgent agent = new AzureOpenAIClient(
new Uri(endpoint),
new AzureCliCredential())
.GetOpenAIResponseClient(deploymentName)
.GetResponsesClient(deploymentName)
.CreateAIAgent(
instructions: "You answer questions by searching the Microsoft Learn content only.",
name: "MicrosoftLearnAgent",
Expand All @@ -57,7 +57,7 @@
AIAgent agentWithRequiredApproval = new AzureOpenAIClient(
new Uri(endpoint),
new AzureCliCredential())
.GetOpenAIResponseClient(deploymentName)
.GetResponsesClient(deploymentName)
.CreateAIAgent(
instructions: "You answer questions by searching the Microsoft Learn content only.",
name: "MicrosoftLearnAgentWithApproval",
Expand Down
2 changes: 1 addition & 1 deletion dotnet/samples/Purview/AgentWithPurview/Program.cs
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
using IChatClient client = new AzureOpenAIClient(
new Uri(endpoint),
new AzureCliCredential())
.GetOpenAIResponseClient(deploymentName)
.GetResponsesClient(deploymentName)
.AsIChatClient()
.AsBuilder()
.WithPurview(browserCredential, new PurviewSettings("Agent Framework Test App"))
Expand Down
17 changes: 7 additions & 10 deletions dotnet/src/Microsoft.Agents.AI.AzureAI/AzureAIProjectChatClient.cs
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,6 @@ internal sealed class AzureAIProjectChatClient : DelegatingChatClient
private readonly AgentRecord? _agentRecord;
private readonly ChatOptions? _chatOptions;
private readonly AgentReference _agentReference;
/// <summary>
/// The usage of a no-op model is a necessary change to avoid OpenAIClients to throw exceptions when
/// used with Azure AI Agents as the model used is now defined at the agent creation time.
/// </summary>
private const string NoOpModel = "no-op";

/// <summary>
/// Initializes a new instance of the <see cref="AzureAIProjectChatClient"/> class.
Expand All @@ -42,7 +37,7 @@ internal sealed class AzureAIProjectChatClient : DelegatingChatClient
internal AzureAIProjectChatClient(AIProjectClient aiProjectClient, AgentReference agentReference, string? defaultModelId, ChatOptions? chatOptions)
: base(Throw.IfNull(aiProjectClient)
.GetProjectOpenAIClient()
.GetOpenAIResponseClient(defaultModelId ?? NoOpModel)
.GetProjectResponsesClientForAgent(agentReference)
.AsIChatClient())
{
this._agentClient = aiProjectClient;
Expand Down Expand Up @@ -132,13 +127,15 @@ private ChatOptions GetAgentEnabledChatOptions(ChatOptions? options)

agentEnabledChatOptions.RawRepresentationFactory = (client) =>
{
if (originalFactory?.Invoke(this) is not ResponseCreationOptions responseCreationOptions)
if (originalFactory?.Invoke(this) is not CreateResponseOptions responseCreationOptions)
{
responseCreationOptions = new ResponseCreationOptions();
responseCreationOptions = new CreateResponseOptions();
}

ResponseCreationOptionsExtensions.set_Agent(responseCreationOptions, this._agentReference);
ResponseCreationOptionsExtensions.set_Model(responseCreationOptions, null);
responseCreationOptions.Agent = this._agentReference;
#pragma warning disable SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.
responseCreationOptions.Patch.Remove("$.model"u8);
#pragma warning restore SCME0001 // Type is for evaluation purposes only and is subject to change or removal in future updates. Suppress this diagnostic to proceed.

return responseCreationOptions;
};
Expand Down
Loading
Loading