Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion Directory.Build.props
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
<Project>

<PropertyGroup>
<LangVersion>preview</LangVersion>
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

<WarnOnPackingNonPackableProject>false</WarnOnPackingNonPackableProject>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<SuppressNETCoreSdkPreviewMessage>true</SuppressNETCoreSdkPreviewMessage>
Expand Down
20 changes: 11 additions & 9 deletions Directory.Packages.props
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,11 @@
<PropertyGroup>
<ManagePackageVersionsCentrally>true</ManagePackageVersionsCentrally>
<CentralPackageTransitivePinningEnabled>true</CentralPackageTransitivePinningEnabled>
<AspnetVersion>9.0.1</AspnetVersion>
<MicrosoftExtensionsVersion>9.0.1</MicrosoftExtensionsVersion>
<AspireVersion>9.0.0</AspireVersion>
<AspireUnstablePackagesVersion>9.0.0-preview.5.24551.3</AspireUnstablePackagesVersion>
<AspnetVersion>9.0.2</AspnetVersion>
<MicrosoftExtensionsVersion>9.0.2</MicrosoftExtensionsVersion>
<MicrosoftExtensionsAIVersion>9.3.0-preview.1.25114.11</MicrosoftExtensionsAIVersion>
<AspireVersion>9.1.0</AspireVersion>
<AspireUnstablePackagesVersion>9.1.0-preview.1.25121.10</AspireUnstablePackagesVersion>
<GrpcVersion>2.67.0</GrpcVersion>
<DuendeVersion>7.0.8</DuendeVersion>
<ApiVersioningVersion>8.1.0</ApiVersioningVersion>
Expand All @@ -28,6 +29,7 @@
</PackageVersion>
<PackageVersion Include="CommunityToolkit.Aspire.Hosting.Ollama" Version="9.1.0" />
<PackageVersion Include="CommunityToolkit.Aspire.OllamaSharp" Version="9.1.0" />
<PackageVersion Include="OllamaSharp" Version="5.0.7" /> <!-- Need a new version that supports MEAI 9.3+ -->
<PackageVersion Include="Microsoft.Extensions.ServiceDiscovery" Version="$(AspireVersion)" />
<PackageVersion Include="Microsoft.Extensions.ServiceDiscovery.Yarp" Version="$(AspireVersion)" />
<!-- Version together with Asp.Versioning -->
Expand Down Expand Up @@ -62,10 +64,10 @@
<!-- Xabaril packages -->
<PackageVersion Include="AspNetCore.HealthChecks.Uris" Version="9.0.0" />
<!-- AI -->
<PackageVersion Include="Microsoft.Extensions.AI" Version="9.1.0-preview.1.25064.3" />
<PackageVersion Include="Microsoft.Extensions.AI.Abstractions" Version="9.1.0-preview.1.25064.3" />
<PackageVersion Include="Microsoft.Extensions.AI.OpenAI" Version="9.1.0-preview.1.25064.3" />
<PackageVersion Include="Microsoft.Extensions.AI.Ollama" Version="9.1.0-preview.1.25064.3" />
<PackageVersion Include="Microsoft.Extensions.AI" Version="$(MicrosoftExtensionsAIVersion)" />
<PackageVersion Include="Microsoft.Extensions.AI.Abstractions" Version="$(MicrosoftExtensionsAIVersion)" />
<PackageVersion Include="Microsoft.Extensions.AI.OpenAI" Version="$(MicrosoftExtensionsAIVersion)" />
<PackageVersion Include="Microsoft.Extensions.AI.Ollama" Version="$(MicrosoftExtensionsAIVersion)" />
<!-- Open Telemetry -->
<PackageVersion Include="OpenTelemetry.Exporter.OpenTelemetryProtocol" Version="1.11.0" />
<PackageVersion Include="OpenTelemetry.Extensions.Hosting" Version="1.11.0" />
Expand Down Expand Up @@ -93,7 +95,7 @@
<PackageVersion Include="FluentValidation.AspNetCore" Version="11.3.0" />
<PackageVersion Include="MediatR" Version="12.4.1" />
<PackageVersion Include="Microsoft.Web.LibraryManager.Build" Version="2.1.175" />
<PackageVersion Include="Polly.Core" Version="8.5.0" />
<PackageVersion Include="Polly.Core" Version="8.5.1" />
<PackageVersion Include="System.Reflection.TypeExtensions" Version="4.7.0" />
<PackageVersion Include="xunit" Version="2.9.2" />
<PackageVersion Include="xunit.runner.visualstudio" Version="2.8.2" />
Expand Down
2 changes: 1 addition & 1 deletion global.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"sdk": {
"version": "9.0.100",
"version": "9.0.200",
"rollForward": "latestFeature",
"allowPrerelease": true
}
Expand Down
6 changes: 3 additions & 3 deletions src/Catalog.API/Extensions/Extensions.cs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
using eShop.Catalog.API.Services;
using Microsoft.Extensions.AI;
using OllamaSharp;
using OpenAI;

public static class Extensions
Expand Down Expand Up @@ -40,7 +41,7 @@ public static void AddApplicationServices(this IHostApplicationBuilder builder)
if (builder.Configuration["OllamaEnabled"] is string ollamaEnabled && bool.Parse(ollamaEnabled))
{
builder.AddOllamaSharpEmbeddingGenerator("embedding");
builder.Services.AddEmbeddingGenerator(b => b.GetRequiredService<IEmbeddingGenerator<string, Embedding<float>>>())
builder.Services.AddEmbeddingGenerator(sp => (IEmbeddingGenerator<string, Embedding<float>>)sp.GetRequiredKeyedService<IOllamaApiClient>("embedding"))
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

FYI @stephentoub @SteveSandersonMS on the change here and in WebApp/Extensions/Extensions.cs.

These 2 things were pretty tricky.

  1. I was getting infinite recursion on Ollama since builder.Services.AddEmbeddingGenerator is adding the IEmbeddingGenerator<string, Embedding<float>> service, and this was trying to get the same service. So it recurses forever.
  2. In OpenAI code below, the call to Build() was failing because it was passing in an empty IServiceProvider to line 51, and that factory was trying to get the OpenAIClient out of the sp, which was empty.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I didn't quite follow that. But is there something you'd recommend we change to make something easier?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

But is there something you'd recommend we change to make something easier?

For the 2nd problem (the call to Build()), there is some awkwardness IMO. Take a look at the existing code:

builder.Services.AddEmbeddingGenerator(sp => sp.GetRequiredService<OpenAIClient>().AsEmbeddingGenerator(builder.Configuration["AI:OpenAI:EmbeddingModel"]!))
.UseOpenTelemetry()
.UseLogging()
.Build();

Code like this will never work.

The call to Build() doesn't pass an IServiceProvider. Thus the sp that is passed to the callback on line 50 will be empty, and won't contain any services. This is because the Build() method looks like this:

    public IEmbeddingGenerator<TInput, TEmbedding> Build(IServiceProvider? services = null)
    {
        services ??= EmptyServiceProvider.Instance;

Maybe Build should have a non-optional IServiceProvider parameter? There is a constructor that takes a Func<IServiceProvider, IEmbeddingGenerator<TInput, TEmbedding>> innerGeneratorFactory which won't work without a valid IServiceProvider being passed. Requiring the IServiceProvider would indicate to the caller that you shouldn't call Build() when adding services to your application, since you don't have an IServiceProvider yet.

For the infinite recursion problem, I'm not sure I have a suggestion. I think this was just confusion on my part trying to understand the old vs the new API usage. When writing new code, I don't think anyone would implement the "innerGeneratorFactory" by directly returning the same service from the IServiceProvider. This is the function that is supposed to create the generator.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

To make sure I understand, you're saying that you not being forced to think about passing a serviceProvider to Build (even if you decide not to pass one) is the problem?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm saying I shouldn't ever call Build() in this code. And if I was forced to pass an IServiceProvider explicitly, it would be obvious that I shouldn't call it here.

The way the existing code currently looks, I would have expected the IServiceProvider from builder.Services (where this code started) to be used. But it isn't used, instead an empty one is passed to the callback on line 50.

Copy link
Member

@eiriktsarpalis eiriktsarpalis Feb 25, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The call to Build() in this location is redundant, since it simply returns an IChatClient instance that is being discarded. If I'm honest I find the pattern used by the AddChatClient/AddEmbeddingGenerator methods somewhat strange and I can see why it could lead to confusion. Instead of doing

builder.Services.AddChatClient(sp => new Implementation())
     .UseOpenTelemetry()
     .UseLogging();

Why not just have the method return void and encourage the equivalent pattern:

builder.Services.AddChatClient(sp => new Implementation()
     .AsBuilder()
     .UseOpenTelemetry() 
     .UseLogging()
     .Build());

It might be more lines of code but the intent is clearer.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Once Return AspireOpenAIClientBuilder from AddOpenAIClientFromConfiguration (dotnet/aspire#7763) is merged, this code will get even more simple. (At least the OpenAI part will. We will need to update the Ollama Aspire integration to follow the same API pattern.)

builder.AddOpenAIClientFromConfiguration("openai");
builder.Services.AddEmbeddingGenerator(sp => sp.GetRequiredService<OpenAIClient>().AsEmbeddingGenerator(builder.Configuration["AI:OpenAI:EmbeddingModel"]!))
    .UseOpenTelemetry()
    .UseLogging();

becomes

builder.AddOpenAIClientFromConfiguration("openai")
    .AddEmbeddingGenerator(builder.Configuration["AI:OpenAI:EmbeddingModel"]!)
    .UseLogging();  // I'm not positive this is required, but I don't see Aspire calling it today

And with further changes like Configure OpenAI models in the app host (dotnet/aspire#6577) we will be able to drop the call to builder.Configuration, and it will simply be:

builder.AddOpenAIClientFromConfiguration("openai-embedding")
    .AddEmbeddingGenerator()
    .UseLogging();  // I'm not positive this is required, but I don't see Aspire calling it today

Since the model/deployment name will be passed via the connection string named "openai-embedding".

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

.UseLogging(); // I'm not positive this is required, but I don't see Aspire calling it today

Assuming UseOpenTelemetry is being used, UseLogging is probably not necessary.

Copy link
Member Author

@eerhardt eerhardt Feb 25, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Assuming UseOpenTelemetry is being used

Yes, it is by default. The Aspire integration enables OpenTelemetry unless someone disables tracing:

https://github.com/dotnet/aspire/blob/3d3ccd4eb8b3f300a91b54f763cbad4ade71ad24/src/Components/Aspire.OpenAI/AspireOpenAIClientBuilderEmbeddingGeneratorExtensions.cs#L59-L61

        return builder.DisableTracing
            ? result
            : new OpenTelemetryEmbeddingGenerator<string, Embedding<float>>(result);

but diffing this code with what's in UseOpenTelemetry(), maybe we need to update it to pass an ILogger in:

https://github.com/dotnet/extensions/blob/576fa221f57f80423a4852e446cee2aa2e937aab/src/Libraries/Microsoft.Extensions.AI/Embeddings/OpenTelemetryEmbeddingGeneratorBuilderExtensions.cs#L36-L43

            loggerFactory ??= services.GetService<ILoggerFactory>();


            var generator = new OpenTelemetryEmbeddingGenerator<TInput, TEmbedding>(
                innerGenerator,
                loggerFactory?.CreateLogger(typeof(OpenTelemetryEmbeddingGenerator<TInput, TEmbedding>)),
                sourceName);
            configure?.Invoke(generator);
            return generator;

I've opened Update OpenAI OpenTelemetry integration to pass an ILogger (dotnet/aspire#7771) for this.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We will need to update the Ollama Aspire integration to follow the same API pattern.

This was done in CommunityToolkit/Aspire#465. We can update here once it ships in a stable version of the Aspire Ollama integration.

cc @aaronpowell

.UseOpenTelemetry()
.UseLogging();
}
Expand All @@ -49,8 +50,7 @@ public static void AddApplicationServices(this IHostApplicationBuilder builder)
builder.AddOpenAIClientFromConfiguration("openai");
builder.Services.AddEmbeddingGenerator(sp => sp.GetRequiredService<OpenAIClient>().AsEmbeddingGenerator(builder.Configuration["AI:OpenAI:EmbeddingModel"]!))
.UseOpenTelemetry()
.UseLogging()
.Build();
.UseLogging();
}

builder.Services.AddScoped<ICatalogAI, CatalogAI>();
Expand Down
4 changes: 2 additions & 2 deletions src/WebApp/Components/Chatbot/ChatState.cs
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ public ChatState(

if (_logger.IsEnabled(LogLevel.Debug))
{
_logger.LogDebug("ChatModel: {model}", chatClient.Metadata.ModelId);
_logger.LogDebug("ChatModel: {model}", chatClient.GetService<ChatClientMetadata>()?.ModelId);
}

_chatClient = chatClient;
Expand Down Expand Up @@ -75,7 +75,7 @@ public async Task AddUserMessageAsync(string userText, Action onMessageAdded)
// Get and store the AI's response message
try
{
ChatCompletion response = await _chatClient.CompleteAsync(Messages, _chatOptions);
var response = await _chatClient.GetResponseAsync(Messages, _chatOptions);
if (!string.IsNullOrWhiteSpace(response.Message.Text))
{
Messages.Add(response.Message);
Expand Down
13 changes: 5 additions & 8 deletions src/WebApp/Extensions/Extensions.cs
Original file line number Diff line number Diff line change
@@ -1,16 +1,14 @@
using System;
using Azure.AI.OpenAI;
using eShop.Basket.API.Grpc;
using eShop.WebApp;
using eShop.WebApp.Services.OrderStatus.IntegrationEvents;
using eShop.WebAppComponents.Services;
using Microsoft.AspNetCore.Authentication.Cookies;
using Microsoft.AspNetCore.Authentication.OpenIdConnect;
using Microsoft.AspNetCore.Components.Authorization;
using Microsoft.AspNetCore.Components.Server;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.IdentityModel.JsonWebTokens;
using eShop.WebApp.Services.OrderStatus.IntegrationEvents;
using eShop.Basket.API.Grpc;
using OllamaSharp;
using OpenAI;

public static class Extensions
Expand Down Expand Up @@ -101,7 +99,7 @@ private static void AddAIServices(this IHostApplicationBuilder builder)
if (builder.Configuration["OllamaEnabled"] is string ollamaEnabled && bool.Parse(ollamaEnabled))
{
builder.AddOllamaSharpChatClient("chat");
builder.Services.AddChatClient(b => b.GetRequiredService<IChatClient>())
builder.Services.AddChatClient(sp => (IChatClient)sp.GetRequiredKeyedService<IOllamaApiClient>("chat"))
.UseFunctionInvocation()
.UseOpenTelemetry(configure: t => t.EnableSensitiveData = true)
.UseLogging();
Expand All @@ -115,8 +113,7 @@ private static void AddAIServices(this IHostApplicationBuilder builder)
builder.Services.AddChatClient(sp => sp.GetRequiredService<OpenAIClient>().AsChatClient(chatModel ?? "gpt-4o-mini"))
.UseFunctionInvocation()
.UseOpenTelemetry(configure: t => t.EnableSensitiveData = true)
.UseLogging()
.Build();
.UseLogging();
}
}
}
Expand Down
24 changes: 22 additions & 2 deletions src/eShop.AppHost/Extensions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ public static IDistributedApplicationBuilder AddOpenAI(this IDistributedApplicat
const string textEmbeddingModelName = "text-embedding-3-small";
const string chatModelName = "gpt-4o-mini";

// to use an existing OpenAI resource, add the following to the AppHost user secrets:
// to use an existing OpenAI resource as a connection string, add the following to the AppHost user secrets:
// "ConnectionStrings": {
// "openai": "Key=<API Key>" (to use https://api.openai.com/)
// -or-
Expand All @@ -60,9 +60,29 @@ public static IDistributedApplicationBuilder AddOpenAI(this IDistributedApplicat
// "ResourceGroupPrefix": "<prefix>",
// "Location": "<location>"
// }
openAI = builder.AddAzureOpenAI(openAIName)

var openAITyped = builder.AddAzureOpenAI(openAIName);

// to use an existing Azure OpenAI resource via provisioning, add the following to the AppHost user secrets:
// "Parameters": {
// "openaiName": "<Azure OpenAI resource name>",
// "openaiResourceGroup": "<Azure OpenAI resource group>"
// }
// - or -
// leave the parameters out to create a new Azure OpenAI resource
if (builder.Configuration["Parameters:openaiName"] is not null &&
builder.Configuration["Parameters:openaiResourceGroup"] is not null)
{
openAITyped.AsExisting(
builder.AddParameter("openaiName"),
builder.AddParameter("openaiResourceGroup"));
}

openAITyped
.AddDeployment(new AzureOpenAIDeployment(chatModelName, "gpt-4o-mini", "2024-07-18"))
.AddDeployment(new AzureOpenAIDeployment(textEmbeddingModelName, "text-embedding-3-small", "1", skuCapacity: 20)); // 20k tokens per minute are needed to seed the initial embeddings

openAI = openAITyped;
}

catalogApi
Expand Down
1 change: 1 addition & 0 deletions src/eShop.AppHost/Program.cs
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
.WithReference(redis)
.WithReference(rabbitMq).WaitFor(rabbitMq)
.WithEnvironment("Identity__Url", identityEndpoint);
redis.WithParentRelationship(basketApi);
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This isn't necessary, but it felt good to use the new feature. This nests the redis resource under the basketApi resource in the dashboard (since it is the only one that uses it).


var catalogApi = builder.AddProject<Projects.Catalog_API>("catalog-api")
.WithReference(rabbitMq).WaitFor(rabbitMq)
Expand Down
2 changes: 1 addition & 1 deletion src/eShop.AppHost/eShop.AppHost.csproj
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
<Project Sdk="Microsoft.NET.Sdk">
<Sdk Name="Aspire.AppHost.Sdk" Version="9.0.0" />
<Sdk Name="Aspire.AppHost.Sdk" Version="9.1.0" />

<PropertyGroup>
<OutputType>Exe</OutputType>
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
<Project Sdk="Microsoft.NET.Sdk">
<Sdk Name="Aspire.AppHost.Sdk" Version="9.0.0" />
<Sdk Name="Aspire.AppHost.Sdk" Version="9.1.0" />

<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
<Project Sdk="Microsoft.NET.Sdk">
<Sdk Name="Aspire.AppHost.Sdk" Version="9.0.0" />
<Sdk Name="Aspire.AppHost.Sdk" Version="9.1.0" />

<PropertyGroup>
<TargetFramework>net9.0</TargetFramework>
Expand Down