diff --git a/Directory.Packages.props b/Directory.Packages.props
index 72dde01a..63a78e1d 100644
--- a/Directory.Packages.props
+++ b/Directory.Packages.props
@@ -56,10 +56,10 @@
-
-
-
-
+
+
+
+
diff --git a/src/Catalog.API/Extensions/Extensions.cs b/src/Catalog.API/Extensions/Extensions.cs
index bd0cde86..767a23d7 100644
--- a/src/Catalog.API/Extensions/Extensions.cs
+++ b/src/Catalog.API/Extensions/Extensions.cs
@@ -31,20 +31,18 @@ public static void AddApplicationServices(this IHostApplicationBuilder builder)
if (builder.Configuration["AI:Ollama:Endpoint"] is string ollamaEndpoint && !string.IsNullOrWhiteSpace(ollamaEndpoint))
{
- builder.Services.AddEmbeddingGenerator<string, Embedding<float>>(b => b
+ builder.Services.AddEmbeddingGenerator(new OllamaEmbeddingGenerator(ollamaEndpoint, builder.Configuration["AI:Ollama:EmbeddingModel"]))
.UseOpenTelemetry()
.UseLogging()
- .Use(new OllamaEmbeddingGenerator(
- new Uri(ollamaEndpoint),
- builder.Configuration["AI:Ollama:EmbeddingModel"])));
+ .Build();
}
else if (!string.IsNullOrWhiteSpace(builder.Configuration.GetConnectionString("openai")))
{
builder.AddOpenAIClientFromConfiguration("openai");
- builder.Services.AddEmbeddingGenerator<string, Embedding<float>>(b => b
+ builder.Services.AddEmbeddingGenerator(sp => sp.GetRequiredService<OpenAIClient>().AsEmbeddingGenerator(builder.Configuration["AI:OpenAI:EmbeddingModel"]!))
.UseOpenTelemetry()
.UseLogging()
- .Use(b.Services.GetRequiredService<OpenAIClient>().AsEmbeddingGenerator(builder.Configuration["AI:OpenAI:EmbeddingModel"]!)));
+ .Build();
}
builder.Services.AddScoped<ICatalogAI, CatalogAI>();
diff --git a/src/Catalog.API/Services/CatalogAI.cs b/src/Catalog.API/Services/CatalogAI.cs
index ca775d61..aa9473f5 100644
--- a/src/Catalog.API/Services/CatalogAI.cs
+++ b/src/Catalog.API/Services/CatalogAI.cs
@@ -58,7 +58,7 @@ public async ValueTask<Vector> GetEmbeddingAsync(string text)
{
long timestamp = Stopwatch.GetTimestamp();
- var embedding = (await _embeddingGenerator.GenerateAsync(new[] { text }))[0].Vector;
+ var embedding = await _embeddingGenerator.GenerateEmbeddingVectorAsync(text);
embedding = embedding[0..EmbeddingDimensions];
if (_logger.IsEnabled(LogLevel.Trace))
diff --git a/src/WebApp/Extensions/Extensions.cs b/src/WebApp/Extensions/Extensions.cs
index c0da5ef5..ceb910b8 100644
--- a/src/WebApp/Extensions/Extensions.cs
+++ b/src/WebApp/Extensions/Extensions.cs
@@ -101,13 +101,11 @@ private static void AddAIServices(this IHostApplicationBuilder builder)
string? ollamaEndpoint = builder.Configuration["AI:Ollama:Endpoint"];
if (!string.IsNullOrWhiteSpace(ollamaEndpoint))
{
- builder.Services.AddChatClient(b => b
+ builder.Services.AddChatClient(new OllamaChatClient(ollamaEndpoint, builder.Configuration["AI:Ollama:ChatModel"] ?? "llama3.1"))
.UseFunctionInvocation()
.UseOpenTelemetry(configure: t => t.EnableSensitiveData = true)
.UseLogging()
- .Use(new OllamaChatClient(
- new Uri(ollamaEndpoint),
- builder.Configuration["AI:Ollama:ChatModel"] ?? "llama3.1")));
+ .Build();
}
else
{
@@ -115,11 +113,11 @@ private static void AddAIServices(this IHostApplicationBuilder builder)
if (!string.IsNullOrWhiteSpace(builder.Configuration.GetConnectionString("openai")) && !string.IsNullOrWhiteSpace(chatModel))
{
builder.AddOpenAIClientFromConfiguration("openai");
- builder.Services.AddChatClient(b => b
+ builder.Services.AddChatClient(sp => sp.GetRequiredService<OpenAIClient>().AsChatClient(chatModel ?? "gpt-4o-mini"))
.UseFunctionInvocation()
.UseOpenTelemetry(configure: t => t.EnableSensitiveData = true)
.UseLogging()
- .Use(b.Services.GetRequiredService<OpenAIClient>().AsChatClient(chatModel ?? "gpt-4o-mini")));
+ .Build();
}
}
}