Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions dotnet/Directory.Packages.props
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,7 @@
<!-- MCP -->
<PackageVersion Include="ModelContextProtocol" Version="1.1.0" />
<!-- Inference SDKs -->
<PackageVersion Include="Microsoft.AI.Foundry.Local" Version="0.9.0" />
<PackageVersion Include="AWSSDK.Extensions.Bedrock.MEAI" Version="4.0.5.1" />
<PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI" Version="0.10.0" />
<PackageVersion Include="Microsoft.ML.Tokenizers" Version="2.0.0" />
Expand Down
1 change: 1 addition & 0 deletions dotnet/agent-framework-dotnet.slnx
Original file line number Diff line number Diff line change
Expand Up @@ -481,6 +481,7 @@
<Project Path="src/Microsoft.Agents.AI.Anthropic/Microsoft.Agents.AI.Anthropic.csproj" />
<Project Path="src/Microsoft.Agents.AI.AzureAI.Persistent/Microsoft.Agents.AI.AzureAI.Persistent.csproj" />
<Project Path="src/Microsoft.Agents.AI.Foundry/Microsoft.Agents.AI.Foundry.csproj" />
<Project Path="src/Microsoft.Agents.AI.FoundryLocal/Microsoft.Agents.AI.FoundryLocal.csproj" />
<Project Path="src/Microsoft.Agents.AI.CopilotStudio/Microsoft.Agents.AI.CopilotStudio.csproj" />
<Project Path="src/Microsoft.Agents.AI.CosmosNoSql/Microsoft.Agents.AI.CosmosNoSql.csproj" />
<Project Path="src/Microsoft.Agents.AI.Declarative/Microsoft.Agents.AI.Declarative.csproj" />
Expand Down
183 changes: 183 additions & 0 deletions dotnet/src/Microsoft.Agents.AI.FoundryLocal/FoundryLocalChatClient.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,183 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using System.ClientModel;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AI.Foundry.Local;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Shared.Diagnostics;
using OpenAI;

namespace Microsoft.Agents.AI.FoundryLocal;

/// <summary>
/// A <see cref="DelegatingChatClient"/> that uses AI Foundry Local for on-device model inference.
/// </summary>
/// <remarks>
/// <para>
/// The client drives the full local-model lifecycle through the Foundry Local SDK:
/// catalog lookup, download, load, and serving over an OpenAI-compatible HTTP endpoint.
/// </para>
/// <para>
/// Initialization involves asynchronous work (model download, model load, web-service startup),
/// so instances are produced by the <see cref="CreateAsync"/> factory method instead of a constructor.
/// </para>
/// <para>
/// Under the covers a dedicated <see cref="OpenAIClient"/> is created against the local Foundry
/// endpoint (typically <c>http://localhost:5272</c>) and adapted to <see cref="IChatClient"/>.
/// Using a separate client instance avoids clashing with the Foundry Local SDK's own internal
/// OpenAI client library.
/// </para>
/// </remarks>
#pragma warning disable OPENAI001
public sealed class FoundryLocalChatClient : DelegatingChatClient
{
    private readonly ChatClientMetadata _metadata;

    /// <summary>
    /// Gets the <see cref="FoundryLocalManager"/> instance managing the local model service.
    /// </summary>
    public FoundryLocalManager Manager { get; }

    /// <summary>
    /// Gets the resolved model identifier being used for inference.
    /// </summary>
    public string ModelId { get; }

    private FoundryLocalChatClient(IChatClient innerClient, FoundryLocalManager manager, string modelId)
        : base(innerClient)
    {
        this.Manager = manager;
        this.ModelId = modelId;
        this._metadata = new ChatClientMetadata("microsoft.foundry.local", defaultModelId: modelId);
    }

    /// <summary>
    /// Creates a new <see cref="FoundryLocalChatClient"/> instance with the specified options.
    /// </summary>
    /// <param name="options">The configuration options for the Foundry Local client. Cannot be <see langword="null"/>.</param>
    /// <param name="logger">An optional logger for diagnostic output during initialization.</param>
    /// <param name="cancellationToken">A cancellation token that can be used to cancel the initialization.</param>
    /// <returns>A task that represents the asynchronous creation operation, containing the initialized <see cref="FoundryLocalChatClient"/>.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="options"/> is <see langword="null"/>.</exception>
    /// <exception cref="InvalidOperationException">
    /// Thrown when no model can be resolved from the options or environment, when the requested model
    /// is absent from the Foundry Local catalog, or when no web-service endpoint URL is available after startup.
    /// </exception>
    /// <remarks>
    /// <para>Driven by <paramref name="options"/>, initialization proceeds as follows:</para>
    /// <list type="number">
    /// <item><description>Resolve the model name from the options or the <c>FOUNDRY_LOCAL_MODEL</c> environment variable.</description></item>
    /// <item><description>Bootstrap the <see cref="FoundryLocalManager"/> when <see cref="FoundryLocalClientOptions.Bootstrap"/> is <see langword="true"/> and it is not yet initialized.</description></item>
    /// <item><description>Resolve the model from the catalog by its alias.</description></item>
    /// <item><description>Download and load the model when <see cref="FoundryLocalClientOptions.PrepareModel"/> is <see langword="true"/>.</description></item>
    /// <item><description>Start the web-service endpoint when <see cref="FoundryLocalClientOptions.StartWebService"/> is <see langword="true"/>.</description></item>
    /// <item><description>Create an <see cref="OpenAIClient"/> for the local endpoint and adapt it to <see cref="IChatClient"/>.</description></item>
    /// </list>
    /// </remarks>
    public static async Task<FoundryLocalChatClient> CreateAsync(
        FoundryLocalClientOptions options,
        ILogger? logger = null,
        CancellationToken cancellationToken = default)
    {
        Throw.IfNull(options);

        logger ??= NullLogger.Instance;

        // Step 1: decide which model alias to use.
        var requestedModel = options.ResolveModel();

        // Step 2: stand up the FoundryLocalManager singleton when requested and not yet done.
        if (options.Bootstrap && !FoundryLocalManager.IsInitialized)
        {
            var serviceUrl = options.WebServiceUrl is { } customUrl ? customUrl.ToString() : "http://localhost:5272";

            var managerConfiguration = new Configuration
            {
                AppName = options.AppName,
                Web = new Configuration.WebService { Urls = serviceUrl },
            };

            await FoundryLocalManager.CreateAsync(managerConfiguration, logger, cancellationToken).ConfigureAwait(false);
        }

        if (!FoundryLocalManager.IsInitialized)
        {
            throw new InvalidOperationException(
                "FoundryLocalManager is not initialized. Enable Bootstrap to initialize it automatically, " +
                "or initialize FoundryLocalManager manually before creating a FoundryLocalChatClient.");
        }

        var manager = FoundryLocalManager.Instance;

        // Step 3: look the model up in the local catalog.
        var modelCatalog = await manager.GetCatalogAsync(cancellationToken).ConfigureAwait(false);
        var catalogModel = await modelCatalog.GetModelAsync(requestedModel, cancellationToken).ConfigureAwait(false);

        if (catalogModel is null)
        {
            throw new InvalidOperationException(
                $"Model with alias '{requestedModel}' was not found in the Foundry Local catalog. " +
                "Use FoundryLocalManager to list available models.");
        }

        var effectiveModelId = catalogModel.Id;

        // Step 4: ensure the model is on disk and loaded into the inference service, when requested.
        if (options.PrepareModel)
        {
            var isCached = await catalogModel.IsCachedAsync(cancellationToken).ConfigureAwait(false);
            if (!isCached)
            {
                // NOTE(review): DownloadAsync is invoked without the cancellation token — confirm
                // whether the SDK exposes a cancellable overload and propagate the token if so.
                await catalogModel.DownloadAsync().ConfigureAwait(false);
            }

            var isLoaded = await catalogModel.IsLoadedAsync(cancellationToken).ConfigureAwait(false);
            if (!isLoaded)
            {
                await catalogModel.LoadAsync(cancellationToken).ConfigureAwait(false);
            }
        }

        // Step 5: bring up the HTTP endpoint if it is not already serving.
        if (options.StartWebService && manager.Urls is null)
        {
            await manager.StartWebServiceAsync(cancellationToken).ConfigureAwait(false);
        }

        var endpointUrls = manager.Urls;
        if (endpointUrls is not { Length: > 0 })
        {
            throw new InvalidOperationException(
                "The Foundry Local web service is not running and no endpoint URLs are available. " +
                "Ensure StartWebService is enabled or start the service manually.");
        }

        // Step 6: target the local endpoint with a dedicated OpenAI client.
        // Foundry Local serves the OpenAI-compatible API under /v1/ (e.g., /v1/chat/completions).
        var baseAddress = endpointUrls[0].TrimEnd('/') + "/v1";
        var localOpenAIClient = new OpenAIClient(
            new ApiKeyCredential("foundry-local"),
            new OpenAIClientOptions { Endpoint = new Uri(baseAddress) });

        // Step 7: adapt the model-bound chat client to IChatClient and wrap it.
        var wrappedClient = localOpenAIClient.GetChatClient(effectiveModelId).AsIChatClient();

        return new FoundryLocalChatClient(wrappedClient, manager, effectiveModelId);
    }

    /// <inheritdoc/>
    public override object? GetService(Type serviceType, object? serviceKey = null)
    {
        if (serviceKey is null)
        {
            if (serviceType == typeof(ChatClientMetadata))
            {
                return this._metadata;
            }

            if (serviceType == typeof(FoundryLocalManager))
            {
                return this.Manager;
            }
        }

        return base.GetService(serviceType, serviceKey);
    }
}
#pragma warning restore OPENAI001
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using System.Collections.Generic;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.Logging;
using Microsoft.Shared.Diagnostics;

namespace Microsoft.Agents.AI.FoundryLocal;

/// <summary>
/// Provides extension methods for <see cref="FoundryLocalChatClient"/>
/// to simplify the creation of AI agents that work with Foundry Local on-device models.
/// </summary>
/// <remarks>
/// These extensions connect the Foundry Local chat client to the Microsoft Agent Framework,
/// wrapping the <see cref="FoundryLocalChatClient"/> in <see cref="ChatClientAgent"/> instances
/// (which implement <see cref="AIAgent"/>) so that agents can run on local model inference.
/// </remarks>
public static class FoundryLocalChatClientExtensions
{
    /// <summary>
    /// Creates an AI agent from a <see cref="FoundryLocalChatClient"/> for local model inference.
    /// </summary>
    /// <param name="client">The <see cref="FoundryLocalChatClient"/> to use for the agent. Cannot be <see langword="null"/>.</param>
    /// <param name="instructions">Optional system instructions that define the agent's behavior and personality.</param>
    /// <param name="name">Optional name for the agent for identification purposes.</param>
    /// <param name="description">Optional description of the agent's capabilities and purpose.</param>
    /// <param name="tools">Optional collection of AI tools that the agent can use during conversations.</param>
    /// <param name="clientFactory">Provides a way to customize the creation of the underlying <see cref="IChatClient"/> used by the agent.</param>
    /// <param name="loggerFactory">Optional logger factory for enabling logging within the agent.</param>
    /// <param name="services">An optional <see cref="IServiceProvider"/> to use for resolving services required by the <see cref="AIFunction"/> instances being invoked.</param>
    /// <returns>A <see cref="ChatClientAgent"/> instance backed by Foundry Local on-device inference.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="client"/> is <see langword="null"/>.</exception>
    public static ChatClientAgent AsAIAgent(
        this FoundryLocalChatClient client,
        string? instructions = null,
        string? name = null,
        string? description = null,
        IList<AITool>? tools = null,
        Func<IChatClient, IChatClient>? clientFactory = null,
        ILoggerFactory? loggerFactory = null,
        IServiceProvider? services = null)
    {
        // Only materialize ChatOptions when there is something to put in it,
        // so agents without instructions/tools keep a null ChatOptions.
        ChatOptions? chatOptions = null;
        if (tools is not null || !string.IsNullOrWhiteSpace(instructions))
        {
            chatOptions = new ChatOptions()
            {
                Instructions = instructions,
                Tools = tools,
            };
        }

        var agentOptions = new ChatClientAgentOptions()
        {
            Name = name,
            Description = description,
            ChatOptions = chatOptions,
        };

        return client.AsAIAgent(agentOptions, clientFactory, loggerFactory, services);
    }

    /// <summary>
    /// Creates an AI agent from a <see cref="FoundryLocalChatClient"/> for local model inference.
    /// </summary>
    /// <param name="client">The <see cref="FoundryLocalChatClient"/> to use for the agent. Cannot be <see langword="null"/>.</param>
    /// <param name="options">Full set of options to configure the agent. Cannot be <see langword="null"/>.</param>
    /// <param name="clientFactory">Provides a way to customize the creation of the underlying <see cref="IChatClient"/> used by the agent.</param>
    /// <param name="loggerFactory">Optional logger factory for enabling logging within the agent.</param>
    /// <param name="services">An optional <see cref="IServiceProvider"/> to use for resolving services required by the <see cref="AIFunction"/> instances being invoked.</param>
    /// <returns>A <see cref="ChatClientAgent"/> instance backed by Foundry Local on-device inference.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="client"/> or <paramref name="options"/> is <see langword="null"/>.</exception>
    public static ChatClientAgent AsAIAgent(
        this FoundryLocalChatClient client,
        ChatClientAgentOptions options,
        Func<IChatClient, IChatClient>? clientFactory = null,
        ILoggerFactory? loggerFactory = null,
        IServiceProvider? services = null)
    {
        Throw.IfNull(client);
        Throw.IfNull(options);

        // Let callers interpose middleware/decorators around the Foundry Local client.
        IChatClient effectiveClient = clientFactory is not null ? clientFactory(client) : client;

        return new ChatClientAgent(effectiveClient, options, loggerFactory, services);
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
// Copyright (c) Microsoft. All rights reserved.

using System;

namespace Microsoft.Agents.AI.FoundryLocal;

/// <summary>
/// Configuration options for creating a <see cref="FoundryLocalChatClient"/>.
/// </summary>
/// <remarks>
/// <para>
/// These options control how the Foundry Local manager is initialized, whether models are
/// automatically downloaded and loaded, and whether the OpenAI-compatible HTTP endpoint is started.
/// </para>
/// <para>
/// The <see cref="Model"/> property is required and specifies the model alias to use (e.g., "phi-4-mini").
/// If not set explicitly, it can be resolved from the <c>FOUNDRY_LOCAL_MODEL</c> environment variable.
/// </para>
/// </remarks>
public sealed class FoundryLocalClientOptions
{
    /// <summary>
    /// Gets or sets the model alias or identifier to use (e.g., "phi-4-mini").
    /// </summary>
    /// <remarks>
    /// If not set (or set to an empty/whitespace value), the value will be resolved from the
    /// <c>FOUNDRY_LOCAL_MODEL</c> environment variable.
    /// This property must be set (either directly or via the environment variable) before creating a
    /// <see cref="FoundryLocalChatClient"/>.
    /// </remarks>
    public string? Model { get; set; }

    /// <summary>
    /// Gets or sets the application name used when initializing the <see cref="Microsoft.AI.Foundry.Local.FoundryLocalManager"/>.
    /// </summary>
    /// <value>The default value is <c>"AgentFramework"</c>.</value>
    public string AppName { get; set; } = "AgentFramework";

    /// <summary>
    /// Gets or sets a value indicating whether to automatically create and initialize the
    /// <see cref="Microsoft.AI.Foundry.Local.FoundryLocalManager"/> if it has not already been initialized.
    /// </summary>
    /// <value>The default value is <see langword="true"/>.</value>
    public bool Bootstrap { get; set; } = true;

    /// <summary>
    /// Gets or sets a value indicating whether to automatically download and load the specified model
    /// during initialization.
    /// </summary>
    /// <remarks>
    /// When set to <see langword="true"/>, the model will be downloaded to the local cache (if not already cached)
    /// and loaded into the inference service. When set to <see langword="false"/>, the model will be loaded on
    /// the first inference request, which may cause a significant delay.
    /// </remarks>
    /// <value>The default value is <see langword="true"/>.</value>
    public bool PrepareModel { get; set; } = true;

    /// <summary>
    /// Gets or sets a value indicating whether to start the OpenAI-compatible HTTP web service endpoint
    /// if it is not already running.
    /// </summary>
    /// <value>The default value is <see langword="true"/>.</value>
    public bool StartWebService { get; set; } = true;

    /// <summary>
    /// Gets or sets an optional custom binding URL for the web service endpoint.
    /// </summary>
    /// <remarks>
    /// When set, this URL will be used to configure the web service binding via
    /// <see cref="Microsoft.AI.Foundry.Local.Configuration.Web"/>.
    /// When <see langword="null"/>, the default URL (typically <c>http://localhost:5272</c>) is used.
    /// </remarks>
    public Uri? WebServiceUrl { get; set; }

    /// <summary>
    /// Resolves the model name from the <see cref="Model"/> property or the <c>FOUNDRY_LOCAL_MODEL</c> environment variable.
    /// </summary>
    /// <returns>The resolved model name.</returns>
    /// <exception cref="InvalidOperationException">
    /// Thrown when neither the <see cref="Model"/> property nor the <c>FOUNDRY_LOCAL_MODEL</c> environment variable
    /// provides a non-empty value.
    /// </exception>
    /// <remarks>
    /// A <see cref="Model"/> value that is empty or whitespace-only is treated the same as unset,
    /// so the environment-variable fallback still applies in that case.
    /// </remarks>
    internal string ResolveModel()
    {
        // An explicitly set, non-blank Model always wins over the environment.
        if (!string.IsNullOrWhiteSpace(this.Model))
        {
            return this.Model;
        }

        // Fall back to the environment variable; blank values count as unset here too,
        // so a whitespace-only Model property no longer blocks this fallback.
        var fromEnvironment = Environment.GetEnvironmentVariable("FOUNDRY_LOCAL_MODEL");
        if (!string.IsNullOrWhiteSpace(fromEnvironment))
        {
            return fromEnvironment;
        }

        throw new InvalidOperationException(
            "A model must be specified. Set the 'Model' property on FoundryLocalClientOptions " +
            "or set the 'FOUNDRY_LOCAL_MODEL' environment variable.");
    }
}
Loading
Loading