diff --git a/KernelMemory.sln b/KernelMemory.sln
index 39151eaec..30e82c5a8 100644
--- a/KernelMemory.sln
+++ b/KernelMemory.sln
@@ -333,6 +333,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "KernelMemory", "extensions\
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AzureOpenAI.FunctionalTests", "extensions\AzureOpenAI\AzureOpenAI.FunctionalTests\AzureOpenAI.FunctionalTests.csproj", "{8E907766-4A7D-46E2-B5E3-EB2994B1AA54}"
EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "006-dotnet-serverless-azure", "examples\006-dotnet-serverless-azure\006-dotnet-serverless-azure.csproj", "{AF1E12A9-D8A1-4815-995E-C6F7B2022016}"
+EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -618,6 +620,9 @@ Global
{8E907766-4A7D-46E2-B5E3-EB2994B1AA54}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{8E907766-4A7D-46E2-B5E3-EB2994B1AA54}.Debug|Any CPU.Build.0 = Debug|Any CPU
{8E907766-4A7D-46E2-B5E3-EB2994B1AA54}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {AF1E12A9-D8A1-4815-995E-C6F7B2022016}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {AF1E12A9-D8A1-4815-995E-C6F7B2022016}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {AF1E12A9-D8A1-4815-995E-C6F7B2022016}.Release|Any CPU.ActiveCfg = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
@@ -717,6 +722,7 @@ Global
{58E65B3F-EFF0-401A-AC76-A49835AE0220} = {155DA079-E267-49AF-973A-D1D44681970F}
{AB097B62-5A0B-4D74-9F8B-A41FE8241447} = {155DA079-E267-49AF-973A-D1D44681970F}
{8E907766-4A7D-46E2-B5E3-EB2994B1AA54} = {3C17F42B-CFC8-4900-8CFB-88936311E919}
+ {AF1E12A9-D8A1-4815-995E-C6F7B2022016} = {0A43C65C-6007-4BB4-B3FE-8D439FC91841}
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {CC136C62-115C-41D1-B414-F9473EFF6EA8}
diff --git a/README.md b/README.md
index 2b32746b1..8faa251a3 100644
--- a/README.md
+++ b/README.md
@@ -416,8 +416,9 @@ Examples and Tools
1. [Collection of Jupyter notebooks with various scenarios](examples/000-notebooks)
2. [Using Kernel Memory web service to upload documents and answer questions](examples/001-dotnet-WebClient)
3. [Importing files and asking question without running the service (serverless mode)](examples/002-dotnet-Serverless)
-4. [Using KM Plugin for Semantic Kernel](examples/003-dotnet-SemanticKernel-plugin)
-5. Customizations
+4. [Kernel Memory RAG with Azure services](examples/006-dotnet-serverless-azure)
+5. [Using KM Plugin for Semantic Kernel](examples/003-dotnet-SemanticKernel-plugin)
+6. Customizations
* [Processing files with custom logic (custom handlers) in serverless mode](examples/004-dotnet-serverless-custom-pipeline)
* [Processing files with custom logic (custom handlers) in asynchronous mode](examples/005-dotnet-AsyncMemoryCustomPipeline)
* [Customizing RAG and summarization prompts](examples/101-dotnet-custom-Prompts)
@@ -427,25 +428,25 @@ Examples and Tools
* [Using a custom web scraper to fetch web pages](examples/109-dotnet-custom-webscraper)
* [Writing and using a custom ingestion handler](examples/201-dotnet-serverless-custom-handler)
* [Using Context Parameters to customize RAG prompt during a request](examples/209-dotnet-using-context-overrides)
-6. Local models and external connectors
+7. Local models and external connectors
* [Using custom LLMs](examples/104-dotnet-custom-LLM)
* [Using local LLMs with Ollama](examples/212-dotnet-ollama)
* [Using local LLMs with llama.cpp via LlamaSharp](examples/105-dotnet-serverless-llamasharp)
* [Using local models with LM Studio](examples/208-dotnet-lmstudio)
* [Using Semantic Kernel LLM connectors](examples/107-dotnet-SemanticKernel-TextCompletion)
* [Generating answers with Anthropic LLMs](examples/110-dotnet-anthropic)
-7. [Upload files and ask questions from command line using curl](examples/006-curl-calling-webservice)
-8. [Summarizing documents, using synthetic memories](examples/106-dotnet-retrieve-synthetics)
-9. [Hybrid Search with Azure AI Search](examples/111-dotnet-azure-ai-hybrid-search)
-10. [Running a single asynchronous pipeline handler as a standalone service](examples/202-dotnet-custom-handler-as-a-service)
-11. [Integrating Memory with ASP.NET applications and controllers](examples/204-dotnet-ASP.NET-MVC-integration)
-12. [Sample code showing how to extract text from files](examples/205-dotnet-extract-text-from-docs)
-13. [.NET configuration and logging](examples/206-dotnet-configuration-and-logging)
-14. [Expanding chunks retrieving adjacent partitions](examples/207-dotnet-expanding-chunks-on-retrieval)
-15. [Creating a Memory instance without KernelMemoryBuilder](examples/210-KM-without-builder)
-16. [Intent Detection](examples/211-dotnet-WebClient-Intent-Detection)
-17. [Fetching data from Discord](examples/301-discord-test-application)
-18. [Test project using KM package from nuget.org](examples/203-dotnet-using-KM-nuget)
+8. [Upload files and ask questions from command line using curl](examples/006-curl-calling-webservice)
+9. [Summarizing documents, using synthetic memories](examples/106-dotnet-retrieve-synthetics)
+10. [Hybrid Search with Azure AI Search](examples/111-dotnet-azure-ai-hybrid-search)
+11. [Running a single asynchronous pipeline handler as a standalone service](examples/202-dotnet-custom-handler-as-a-service)
+12. [Integrating Memory with ASP.NET applications and controllers](examples/204-dotnet-ASP.NET-MVC-integration)
+13. [Sample code showing how to extract text from files](examples/205-dotnet-extract-text-from-docs)
+14. [.NET configuration and logging](examples/206-dotnet-configuration-and-logging)
+15. [Expanding chunks retrieving adjacent partitions](examples/207-dotnet-expanding-chunks-on-retrieval)
+16. [Creating a Memory instance without KernelMemoryBuilder](examples/210-KM-without-builder)
+17. [Intent Detection](examples/211-dotnet-WebClient-Intent-Detection)
+18. [Fetching data from Discord](examples/301-discord-test-application)
+19. [Test project using KM package from nuget.org](examples/203-dotnet-using-KM-nuget)
## Tools
@@ -526,22 +527,22 @@ githubcontrib --repo kernel-memory --owner microsoft --showlogin true --sortBy l
:---: |:---: |:---: |:---: |:---: |:---: |
[dependabot[bot]](https://github.com/apps/dependabot) |[dluc](https://github.com/dluc) |[DM-98](https://github.com/DM-98) |[EelcoKoster](https://github.com/EelcoKoster) |[Foorcee](https://github.com/Foorcee) |[GraemeJones104](https://github.com/GraemeJones104) |
-[
](https://github.com/jurepurgar) |[
](https://github.com/JustinRidings) |[
](https://github.com/kbeaugrand) |[
](https://github.com/koteus) |[
](https://github.com/KSemenenko) |[
](https://github.com/lecramr) |
+[
](https://github.com/imranshams) |[
](https://github.com/jurepurgar) |[
](https://github.com/JustinRidings) |[
](https://github.com/kbeaugrand) |[
](https://github.com/koteus) |[
](https://github.com/KSemenenko) |
:---: |:---: |:---: |:---: |:---: |:---: |
-[jurepurgar](https://github.com/jurepurgar) |[JustinRidings](https://github.com/JustinRidings) |[kbeaugrand](https://github.com/kbeaugrand) |[koteus](https://github.com/koteus) |[KSemenenko](https://github.com/KSemenenko) |[lecramr](https://github.com/lecramr) |
+[imranshams](https://github.com/imranshams) |[jurepurgar](https://github.com/jurepurgar) |[JustinRidings](https://github.com/JustinRidings) |[kbeaugrand](https://github.com/kbeaugrand) |[koteus](https://github.com/koteus) |[KSemenenko](https://github.com/KSemenenko) |
-[
](https://github.com/luismanez) |[
](https://github.com/marcominerva) |[
](https://github.com/neel015) |[
](https://github.com/pascalberger) |[
](https://github.com/pawarsum12) |[
](https://github.com/pradeepr-roboticist) |
+[
](https://github.com/lecramr) |[
](https://github.com/luismanez) |[
](https://github.com/marcominerva) |[
](https://github.com/neel015) |[
](https://github.com/pascalberger) |[
](https://github.com/pawarsum12) |
:---: |:---: |:---: |:---: |:---: |:---: |
-[luismanez](https://github.com/luismanez) |[marcominerva](https://github.com/marcominerva) |[neel015](https://github.com/neel015) |[pascalberger](https://github.com/pascalberger) |[pawarsum12](https://github.com/pawarsum12) |[pradeepr-roboticist](https://github.com/pradeepr-roboticist) |
+[lecramr](https://github.com/lecramr) |[luismanez](https://github.com/luismanez) |[marcominerva](https://github.com/marcominerva) |[neel015](https://github.com/neel015) |[pascalberger](https://github.com/pascalberger) |[pawarsum12](https://github.com/pawarsum12) |
-[
](https://github.com/qihangnet) |[
](https://github.com/roldengarm) |[
](https://github.com/setuc) |[
](https://github.com/slapointe) |[
](https://github.com/slorello89) |[
](https://github.com/snakex64) |
+[
](https://github.com/pradeepr-roboticist) |[
](https://github.com/qihangnet) |[
](https://github.com/roldengarm) |[
](https://github.com/setuc) |[
](https://github.com/slapointe) |[
](https://github.com/slorello89) |
:---: |:---: |:---: |:---: |:---: |:---: |
-[qihangnet](https://github.com/qihangnet) |[roldengarm](https://github.com/roldengarm) |[setuc](https://github.com/setuc) |[slapointe](https://github.com/slapointe) |[slorello89](https://github.com/slorello89) |[snakex64](https://github.com/snakex64) |
+[pradeepr-roboticist](https://github.com/pradeepr-roboticist) |[qihangnet](https://github.com/qihangnet) |[roldengarm](https://github.com/roldengarm) |[setuc](https://github.com/setuc) |[slapointe](https://github.com/slapointe) |[slorello89](https://github.com/slorello89) |
-[
](https://github.com/spenavajr) |[
](https://github.com/TaoChenOSU) |[
](https://github.com/teresaqhoang) |[
](https://github.com/tomasz-skarzynski) |[
](https://github.com/v-msamovendyuk) |[
](https://github.com/Valkozaur) |
+[
](https://github.com/snakex64) |[
](https://github.com/spenavajr) |[
](https://github.com/TaoChenOSU) |[
](https://github.com/teresaqhoang) |[
](https://github.com/tomasz-skarzynski) |[
](https://github.com/v-msamovendyuk) |
:---: |:---: |:---: |:---: |:---: |:---: |
-[spenavajr](https://github.com/spenavajr) |[TaoChenOSU](https://github.com/TaoChenOSU) |[teresaqhoang](https://github.com/teresaqhoang) |[tomasz-skarzynski](https://github.com/tomasz-skarzynski) |[v-msamovendyuk](https://github.com/v-msamovendyuk) |[Valkozaur](https://github.com/Valkozaur) |
+[snakex64](https://github.com/snakex64) |[spenavajr](https://github.com/spenavajr) |[TaoChenOSU](https://github.com/TaoChenOSU) |[teresaqhoang](https://github.com/teresaqhoang) |[tomasz-skarzynski](https://github.com/tomasz-skarzynski) |[v-msamovendyuk](https://github.com/v-msamovendyuk) |
-[
](https://github.com/vicperdana) |[
](https://github.com/walexee) |[
](https://github.com/westdavidr) |[
](https://github.com/xbotter) |
-:---: |:---: |:---: |:---: |
-[vicperdana](https://github.com/vicperdana) |[walexee](https://github.com/walexee) |[westdavidr](https://github.com/westdavidr) |[xbotter](https://github.com/xbotter) |
+[
](https://github.com/Valkozaur) |[
](https://github.com/vicperdana) |[
](https://github.com/walexee) |[
](https://github.com/westdavidr) |[
](https://github.com/xbotter) |
+:---: |:---: |:---: |:---: |:---: |
+[Valkozaur](https://github.com/Valkozaur) |[vicperdana](https://github.com/vicperdana) |[walexee](https://github.com/walexee) |[westdavidr](https://github.com/westdavidr) |[xbotter](https://github.com/xbotter) |
diff --git a/examples/002-dotnet-Serverless/Program.cs b/examples/002-dotnet-Serverless/Program.cs
index 14e92dcdc..f0e7d5fbe 100644
--- a/examples/002-dotnet-Serverless/Program.cs
+++ b/examples/002-dotnet-Serverless/Program.cs
@@ -1,6 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
using Microsoft.KernelMemory;
+using Microsoft.KernelMemory.Safety.AzureAIContentSafety;
/* Use MemoryServerlessClient to run the default import pipeline
* in the same process, without distributed queues.
@@ -21,15 +22,18 @@ public static class Program
public static async Task Main()
{
var memoryConfiguration = new KernelMemoryConfig();
- var openAIConfig = new OpenAIConfig();
- var azureOpenAITextConfig = new AzureOpenAIConfig();
- var azureOpenAIEmbeddingConfig = new AzureOpenAIConfig();
- var llamaConfig = new LlamaSharpConfig();
var searchClientConfig = new SearchClientConfig();
- var azDocIntelConfig = new AzureAIDocIntelConfig();
+
+ var azureAIContentSafetyConfig = new AzureAIContentSafetyConfig();
+ var azureAIDocIntelConfig = new AzureAIDocIntelConfig();
var azureAISearchConfig = new AzureAISearchConfig();
- var postgresConfig = new PostgresConfig();
var azureBlobConfig = new AzureBlobsConfig();
+ var azureOpenAIEmbeddingConfig = new AzureOpenAIConfig();
+ var azureOpenAITextConfig = new AzureOpenAIConfig();
+
+ var openAIConfig = new OpenAIConfig();
+ var llamaConfig = new LlamaSharpConfig();
+ var postgresConfig = new PostgresConfig();
var awsS3Config = new AWSS3Config();
new ConfigurationBuilder()
@@ -38,16 +42,17 @@ public static async Task Main()
.AddJsonFile("appsettings.Development.json", optional: true)
.Build()
.BindSection("KernelMemory", memoryConfiguration)
- .BindSection("KernelMemory:Services:OpenAI", openAIConfig)
- .BindSection("KernelMemory:Services:AzureOpenAIText", azureOpenAITextConfig)
- .BindSection("KernelMemory:Services:AzureOpenAIEmbedding", azureOpenAIEmbeddingConfig)
- .BindSection("KernelMemory:Services:LlamaSharp", llamaConfig)
- .BindSection("KernelMemory:Services:AzureAIDocIntel", azDocIntelConfig)
+ .BindSection("KernelMemory:Retrieval:SearchClient", searchClientConfig)
+ .BindSection("KernelMemory:Services:AzureAIContentSafety", azureAIContentSafetyConfig)
+ .BindSection("KernelMemory:Services:AzureAIDocIntel", azureAIDocIntelConfig)
.BindSection("KernelMemory:Services:AzureAISearch", azureAISearchConfig)
.BindSection("KernelMemory:Services:AzureBlobs", azureBlobConfig)
+ .BindSection("KernelMemory:Services:AzureOpenAIEmbedding", azureOpenAIEmbeddingConfig)
+ .BindSection("KernelMemory:Services:AzureOpenAIText", azureOpenAITextConfig)
+ .BindSection("KernelMemory:Services:OpenAI", openAIConfig)
+ .BindSection("KernelMemory:Services:LlamaSharp", llamaConfig)
.BindSection("KernelMemory:Services:AWSS3", awsS3Config)
- .BindSection("KernelMemory:Services:Postgres", postgresConfig)
- .BindSection("KernelMemory:Retrieval:SearchClient", searchClientConfig);
+ .BindSection("KernelMemory:Services:Postgres", postgresConfig);
var builder = new KernelMemoryBuilder()
.Configure(builder => builder.Services.AddLogging(l =>
@@ -57,26 +62,27 @@ public static async Task Main()
}))
.AddSingleton(memoryConfiguration)
// .WithOpenAIDefaults(Environment.GetEnvironmentVariable("OPENAI_API_KEY")) // Use OpenAI for text generation and embedding
- // .WithOpenAI(openAIConfig) // Use OpenAI for text generation and embedding
- // .WithLlamaTextGeneration(llamaConfig) // Generate answers and summaries using LLama
- // .WithAzureAISearchMemoryDb(azureAISearchConfig) // Store memories in Azure AI Search
- // .WithPostgresMemoryDb(postgresConfig) // Store memories in Postgres
- // .WithQdrantMemoryDb("http://127.0.0.1:6333") // Store memories in Qdrant
- // .WithSimpleVectorDb(SimpleVectorDbConfig.Persistent) // Store memories on disk
- // .WithAzureBlobsDocumentStorage(azureBlobConfig) // Store files in Azure Blobs
- // .WithSimpleFileStorage(SimpleFileStorageConfig.Persistent) // Store files on disk
- // .WithAWSS3DocumentStorage(awsS3Config) // Store files on AWS S3
+ // .WithOpenAI(openAIConfig) // Use OpenAI for text generation and embedding
+ // .WithLlamaTextGeneration(llamaConfig) // Generate answers and summaries using LLama
+ // .WithAzureAIContentSafetyModeration(azureAIContentSafetyConfig) // Content moderation
+ // .WithAzureAISearchMemoryDb(azureAISearchConfig) // Store memories in Azure AI Search
+ // .WithPostgresMemoryDb(postgresConfig) // Store memories in Postgres
+ // .WithQdrantMemoryDb("http://127.0.0.1:6333") // Store memories in Qdrant
+ // .WithSimpleVectorDb(SimpleVectorDbConfig.Persistent) // Store memories on disk
+ // .WithAzureBlobsDocumentStorage(azureBlobConfig) // Store files in Azure Blobs
+ // .WithSimpleFileStorage(SimpleFileStorageConfig.Persistent) // Store files on disk
+ // .WithAWSS3DocumentStorage(awsS3Config) // Store files on AWS S3
.WithAzureOpenAITextGeneration(azureOpenAITextConfig)
.WithAzureOpenAITextEmbeddingGeneration(azureOpenAIEmbeddingConfig);
if (s_imageSupportDemoEnabled)
{
- if (azDocIntelConfig.Auth == AzureAIDocIntelConfig.AuthTypes.APIKey && string.IsNullOrWhiteSpace(azDocIntelConfig.APIKey))
+ if (azureAIDocIntelConfig.Auth == AzureAIDocIntelConfig.AuthTypes.APIKey && string.IsNullOrWhiteSpace(azureAIDocIntelConfig.APIKey))
{
Console.WriteLine("Azure AI Document Intelligence API key not found. OCR demo disabled.");
s_imageSupportDemoEnabled = false;
}
- else { builder.WithAzureAIDocIntel(azDocIntelConfig); }
+ else { builder.WithAzureAIDocIntel(azureAIDocIntelConfig); }
}
s_memory = builder.Build();
diff --git a/examples/002-dotnet-Serverless/README.md b/examples/002-dotnet-Serverless/README.md
index f41cbdfd6..9b970e7aa 100644
--- a/examples/002-dotnet-Serverless/README.md
+++ b/examples/002-dotnet-Serverless/README.md
@@ -21,24 +21,17 @@ string answer = await memory.AskAsync("What's Semantic Kernel?");
# Prepare the example
-Before running the code, from the folder run this command:
+Before running the code, create an `appsettings.Development.json` file (or edit `appsettings.json`),
+overriding the values. The most important are endpoints and authentication details.
-```csharp
-dotnet run setup
-```
-
-The app will ask a few questions about your configuration, storing the
-required information in `appsettings.Development.json`. This file is used when
+Note: the information stored in `appsettings.Development.json` is used only when
the env var `ASPNETCORE_ENVIRONMENT` is set to `Development`. Look at the
comments in `appsettings.json` for details and more advanced options.
-You can run the command again later to edit the file, or edit it manually for
-advanced configurations.
-
You can find more details about the configuration options in `appsettings.json`,
and more info about .NET configurations at
https://learn.microsoft.com/aspnet/core/fundamentals/configuration
# Run the example
-To run the example, depending on your platform, execute either `run.sh` or `run.cmd`.
\ No newline at end of file
+To run the example, execute `dotnet run` from this folder.
\ No newline at end of file
diff --git a/examples/002-dotnet-Serverless/appsettings.json b/examples/002-dotnet-Serverless/appsettings.json
index c4a64ce23..09477c1be 100644
--- a/examples/002-dotnet-Serverless/appsettings.json
+++ b/examples/002-dotnet-Serverless/appsettings.json
@@ -14,43 +14,15 @@
},
"KernelMemory": {
"Services": {
- "AzureOpenAIText": {
- // "ApiKey" or "AzureIdentity"
+ "AzureAIContentSafety": {
+ // "ApiKey" or "AzureIdentity".
// AzureIdentity: use automatic AAD authentication mechanism. You can test locally
// using the env vars AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET.
"Auth": "AzureIdentity",
- "Endpoint": "https://<...>.openai.azure.com/",
- "APIKey": "",
- "Deployment": "",
- // The max number of tokens supported by model deployed
- // See https://learn.microsoft.com/azure/ai-services/openai/concepts/models
- "MaxTokenTotal": 16384,
- // "ChatCompletion" or "TextCompletion"
- "APIType": "ChatCompletion",
- // How many times to retry in case of throttling.
- "MaxRetries": 10
- },
- "AzureOpenAIEmbedding": {
- // "ApiKey" or "AzureIdentity"
- // AzureIdentity: use automatic AAD authentication mechanism. You can test locally
- // using the env vars AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET.
- "Auth": "AzureIdentity",
- "Endpoint": "https://<...>.openai.azure.com/",
+ "Endpoint": "https://<...>",
"APIKey": "",
- "Deployment": "",
- // The max number of tokens supported by model deployed
- // See https://learn.microsoft.com/azure/ai-services/openai/concepts/models
- "MaxTokenTotal": 8191,
- // The number of dimensions output embeddings should have.
- // Only supported in "text-embedding-3" and later models developed with
- // MRL, see https://arxiv.org/abs/2205.13147
- "EmbeddingDimensions": null,
- // How many embeddings to calculate in parallel. The max value depends on
- // the model and deployment in use.
- // See also hhttps://learn.microsoft.com/azure/ai-services/openai/reference#embeddings
- "MaxEmbeddingBatchSize": 10,
- // How many times to retry in case of throttling.
- "MaxRetries": 10
+ "GlobalSafetyThreshold": 0.0,
+ "IgnoredWords": []
},
"AzureAIDocIntel": {
// "APIKey" or "AzureIdentity".
@@ -83,6 +55,60 @@
// See https://learn.microsoft.com/rest/api/searchservice/documents/search-post?view=rest-searchservice-2024-07-01&tabs=HTTP#request-body
"UseStickySessions": false
},
+ "AzureBlobs": {
+ // "ConnectionString" or "AzureIdentity".
+ // AzureIdentity: use automatic AAD authentication mechanism. You can test locally
+ // using the env vars AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET.
+ "Auth": "AzureIdentity",
+ // Azure Storage account name, required when using AzureIdentity auth
+ // Note: you can use an env var 'KernelMemory__Services__AzureBlobs__Account' to set this
+ "Account": "",
+ // Container where to create directories and upload files
+ "Container": "smemory",
+ // Required when Auth == ConnectionString
+ // Note: you can use an env var 'KernelMemory__Services__AzureBlobs__ConnectionString' to set this
+ "ConnectionString": "",
+ // Setting used only for country clouds
+ "EndpointSuffix": "core.windows.net"
+ },
+ "AzureOpenAIEmbedding": {
+ // "ApiKey" or "AzureIdentity"
+ // AzureIdentity: use automatic AAD authentication mechanism. You can test locally
+ // using the env vars AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET.
+ "Auth": "AzureIdentity",
+ "Endpoint": "https://<...>.openai.azure.com/",
+ "APIKey": "",
+ "Deployment": "",
+ // The max number of tokens supported by model deployed
+ // See https://learn.microsoft.com/azure/ai-services/openai/concepts/models
+ "MaxTokenTotal": 8191,
+ // The number of dimensions output embeddings should have.
+ // Only supported in "text-embedding-3" and later models developed with
+ // MRL, see https://arxiv.org/abs/2205.13147
+ "EmbeddingDimensions": null,
+ // How many embeddings to calculate in parallel. The max value depends on
+ // the model and deployment in use.
+ // See also https://learn.microsoft.com/azure/ai-services/openai/reference#embeddings
+ "MaxEmbeddingBatchSize": 10,
+ // How many times to retry in case of throttling.
+ "MaxRetries": 10
+ },
+ "AzureOpenAIText": {
+ // "ApiKey" or "AzureIdentity"
+ // AzureIdentity: use automatic AAD authentication mechanism. You can test locally
+ // using the env vars AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET.
+ "Auth": "AzureIdentity",
+ "Endpoint": "https://<...>.openai.azure.com/",
+ "APIKey": "",
+ "Deployment": "",
+ // The max number of tokens supported by model deployed
+ // See https://learn.microsoft.com/azure/ai-services/openai/concepts/models
+ "MaxTokenTotal": 16384,
+ // "ChatCompletion" or "TextCompletion"
+ "APIType": "ChatCompletion",
+ // How many times to retry in case of throttling.
+ "MaxRetries": 10
+ },
"OpenAI": {
// Name of the model used to generate text (text completion or chat completion)
"TextModel": "gpt-4o-mini",
diff --git a/examples/002-dotnet-Serverless/setup.cmd b/examples/002-dotnet-Serverless/setup.cmd
deleted file mode 100644
index 75e18a50b..000000000
--- a/examples/002-dotnet-Serverless/setup.cmd
+++ /dev/null
@@ -1,5 +0,0 @@
-@echo off
-
-dotnet restore
-dotnet build
-dotnet run setup
diff --git a/examples/004-dotnet-serverless-custom-pipeline/README.md b/examples/004-dotnet-serverless-custom-pipeline/README.md
index 337da2d53..2a2045150 100644
--- a/examples/004-dotnet-serverless-custom-pipeline/README.md
+++ b/examples/004-dotnet-serverless-custom-pipeline/README.md
@@ -39,5 +39,6 @@ Note that as soon as `ImportDocumentAsync` is done, the memories are available f
# Run the example
-To run the example, either set the `OPENAI_API_KEY` environment variable with your
-OpenAI API key, or adjust the memory builder code to use Azure or other LLMs.
+To run the example, first set the `OPENAI_API_KEY` environment variable with your
+OpenAI API key, or adjust the memory builder code to use Azure or other LLMs, then
+execute `dotnet run` from this folder.
\ No newline at end of file
diff --git a/examples/004-dotnet-serverless-custom-pipeline/setup.cmd b/examples/004-dotnet-serverless-custom-pipeline/setup.cmd
deleted file mode 100644
index 75e18a50b..000000000
--- a/examples/004-dotnet-serverless-custom-pipeline/setup.cmd
+++ /dev/null
@@ -1,5 +0,0 @@
-@echo off
-
-dotnet restore
-dotnet build
-dotnet run setup
diff --git a/examples/004-dotnet-serverless-custom-pipeline/setup.sh b/examples/004-dotnet-serverless-custom-pipeline/setup.sh
deleted file mode 100755
index 0ec8f85ae..000000000
--- a/examples/004-dotnet-serverless-custom-pipeline/setup.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-cd "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/"
-
-dotnet restore
-dotnet build
-dotnet run setup
-
diff --git a/examples/006-dotnet-serverless-azure/006-dotnet-serverless-azure.csproj b/examples/006-dotnet-serverless-azure/006-dotnet-serverless-azure.csproj
new file mode 100644
index 000000000..fca07cf1e
--- /dev/null
+++ b/examples/006-dotnet-serverless-azure/006-dotnet-serverless-azure.csproj
@@ -0,0 +1,23 @@
+
+
+
+ net8.0
+ enable
+
+
+
+
+
+
+
+
+
+ Always
+
+
+
+ Always
+
+
+
+
diff --git a/examples/006-dotnet-serverless-azure/Program.cs b/examples/006-dotnet-serverless-azure/Program.cs
new file mode 100644
index 000000000..d99e2c41f
--- /dev/null
+++ b/examples/006-dotnet-serverless-azure/Program.cs
@@ -0,0 +1,104 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.KernelMemory;
+using Microsoft.KernelMemory.Safety.AzureAIContentSafety;
+
+///
+/// This example uses all and only Azure services
+///
+/// - Azure Blobs: used to store files.
+/// - Azure AI Document Intelligence: used to extract text from images.
+/// - Azure OpenAI: used to index data with embeddings and to generate answers.
+/// - Azure AI Search: used to store embeddings and chunks of text.
+/// - Azure Content Safety: validate LLM output to avoid unsafe content.
+///
+public static class Program
+{
+ private static MemoryServerless? s_memory;
+ private const string IndexName = "example006";
+
+ public static async Task Main()
+ {
+ var memoryConfiguration = new KernelMemoryConfig();
+
+ var azureAIContentSafetyConfig = new AzureAIContentSafetyConfig();
+ var azureAIDocIntelConfig = new AzureAIDocIntelConfig();
+ var azureAISearchConfig = new AzureAISearchConfig();
+ var azureBlobConfig = new AzureBlobsConfig();
+ var azureOpenAIEmbeddingConfig = new AzureOpenAIConfig();
+ var azureOpenAITextConfig = new AzureOpenAIConfig();
+
+ new ConfigurationBuilder()
+ .AddJsonFile("appsettings.json")
+ .AddJsonFile("appsettings.development.json", optional: true)
+ .AddJsonFile("appsettings.Development.json", optional: true)
+ .Build()
+ .BindSection("KernelMemory", memoryConfiguration)
+ .BindSection("KernelMemory:Services:AzureAIContentSafety", azureAIContentSafetyConfig)
+ .BindSection("KernelMemory:Services:AzureAIDocIntel", azureAIDocIntelConfig)
+ .BindSection("KernelMemory:Services:AzureAISearch", azureAISearchConfig)
+ .BindSection("KernelMemory:Services:AzureBlobs", azureBlobConfig)
+ .BindSection("KernelMemory:Services:AzureOpenAIEmbedding", azureOpenAIEmbeddingConfig)
+ .BindSection("KernelMemory:Services:AzureOpenAIText", azureOpenAITextConfig);
+
+ var builder = new KernelMemoryBuilder()
+ .WithAzureBlobsDocumentStorage(azureBlobConfig)
+ .WithAzureAIDocIntel(azureAIDocIntelConfig)
+ .WithAzureOpenAITextEmbeddingGeneration(azureOpenAIEmbeddingConfig)
+ .WithAzureOpenAITextGeneration(azureOpenAITextConfig)
+ .WithAzureAISearchMemoryDb(azureAISearchConfig)
+ .WithAzureAIContentSafetyModeration(azureAIContentSafetyConfig)
+ .Configure(builder => builder.Services.AddLogging(l =>
+ {
+ l.SetMinimumLevel(LogLevel.Warning);
+ l.AddSimpleConsole(c => c.SingleLine = true);
+ }));
+
+ s_memory = builder.Build();
+
+ await StoreWebPage();
+ await StoreImage();
+
+ // Test 1
+ var question = "What's Kernel Memory?";
+ Console.WriteLine($"Question: {question}");
+ var answer = await s_memory.AskAsync(question, minRelevance: 0.5, index: IndexName);
+ Console.WriteLine($"Answer: {answer.Result}\n\n");
+
+ // Test 2
+ question = "Which conference is Microsoft sponsoring?";
+ Console.WriteLine($"Question: {question}");
+ answer = await s_memory.AskAsync(question, minRelevance: 0.5, index: IndexName);
+ Console.WriteLine($"Answer: {answer.Result}\n\n");
+ }
+
+ // Downloading web pages
+ private static async Task StoreWebPage()
+ {
+ const string DocId = "webPage1";
+ if (!await s_memory!.IsDocumentReadyAsync(DocId, index: IndexName))
+ {
+ Console.WriteLine("Uploading https://raw.githubusercontent.com/microsoft/kernel-memory/main/README.md");
+ await s_memory.ImportWebPageAsync("https://raw.githubusercontent.com/microsoft/kernel-memory/main/README.md", index: IndexName, documentId: DocId);
+ }
+ else
+ {
+ Console.WriteLine($"{DocId} already uploaded.");
+ }
+ }
+
+ // Extract memory from images (OCR required)
+ private static async Task StoreImage()
+ {
+ const string DocId = "img001";
+ if (!await s_memory!.IsDocumentReadyAsync(DocId, index: IndexName))
+ {
+ Console.WriteLine("Uploading Image file with a news about a conference sponsored by Microsoft");
+ await s_memory.ImportDocumentAsync(new Document(DocId).AddFiles(["file6-ANWC-image.jpg"]), index: IndexName);
+ }
+ else
+ {
+ Console.WriteLine($"{DocId} already uploaded.");
+ }
+ }
+}
diff --git a/examples/006-dotnet-serverless-azure/README.md b/examples/006-dotnet-serverless-azure/README.md
new file mode 100644
index 000000000..e70b87b77
--- /dev/null
+++ b/examples/006-dotnet-serverless-azure/README.md
@@ -0,0 +1,31 @@
+## Example: serverless, no deployment, using Azure services
+
+This example shows how to import multiple files and ask questions, without
+deploying the Kernel Memory Service, leveraging all Azure services:
+
+- [Azure Blobs](https://learn.microsoft.com/azure/storage/blobs/storage-blobs-introduction): used to store files.
+- [Azure AI Document Intelligence](https://azure.microsoft.com/products/ai-services/ai-document-intelligence): used to extract text from images.
+- [Azure OpenAI](https://azure.microsoft.com/products/ai-services/openai-service): used to index data with embeddings and to generate answers.
+- [Azure AI Search](https://learn.microsoft.com/azure/search/search-what-is-azure-search): used to store embeddings and chunks of text.
+- [Azure AI Content Safety](https://azure.microsoft.com/products/ai-services/ai-content-safety): validate LLM output to avoid unsafe content.
+
+For each service, you can find and configure settings in [appsettings.json](appsettings.json).
+
+The example runs a couple of memory ingestions and asks questions, verifying the end-to-end flow; see the code in [Program.cs](Program.cs).
+
+# Prepare the example
+
+Before running the code, create an `appsettings.Development.json` file (or edit `appsettings.json`),
+overriding the values. The most important are endpoints and authentication details.
+
+Note: the information stored in `appsettings.Development.json` is used only when
+the env var `ASPNETCORE_ENVIRONMENT` is set to `Development`. Look at the
+comments in `appsettings.json` for details and more advanced options.
+
+You can find more details about the configuration options in `appsettings.json`,
+and more info about .NET configurations at
+https://learn.microsoft.com/aspnet/core/fundamentals/configuration
+
+# Run the example
+
+To run the example, execute `dotnet run` from this folder.
\ No newline at end of file
diff --git a/examples/006-dotnet-serverless-azure/appsettings.json b/examples/006-dotnet-serverless-azure/appsettings.json
new file mode 100644
index 000000000..d838c7e0f
--- /dev/null
+++ b/examples/006-dotnet-serverless-azure/appsettings.json
@@ -0,0 +1,107 @@
+{
+ "Logging": {
+ "LogLevel": {
+ "Default": "Warning",
+ "Microsoft.AspNetCore": "Warning"
+ }
+ },
+ "KernelMemory": {
+ "Services": {
+ "AzureAIContentSafety": {
+ // "ApiKey" or "AzureIdentity". For other options see .
+ // AzureIdentity: use automatic AAD authentication mechanism. You can test locally
+ // using the env vars AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET.
+ "Auth": "AzureIdentity",
+ "Endpoint": "https://<...>",
+ "APIKey": "",
+ "GlobalSafetyThreshold": 0.0,
+ "IgnoredWords": []
+ },
+ "AzureAIDocIntel": {
+ // "APIKey" or "AzureIdentity".
+ // AzureIdentity: use automatic AAD authentication mechanism. You can test locally
+ // using the env vars AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET.
+ "Auth": "AzureIdentity",
+ // Required when Auth == APIKey
+ "APIKey": "",
+ "Endpoint": ""
+ },
+ "AzureAISearch": {
+ // "ApiKey" or "AzureIdentity". For other options see .
+ // AzureIdentity: use automatic AAD authentication mechanism. You can test locally
+ // using the env vars AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET.
+ "Auth": "AzureIdentity",
+ "Endpoint": "https://<...>",
+ "APIKey": "",
+ // Hybrid search is not enabled by default. Note that when using hybrid search
+ // relevance scores are different, usually lower, than when using just vector search
+ "UseHybridSearch": false,
+ // Helps improve relevance score consistency for search services with multiple replicas by
+ // attempting to route a given request to the same replica for that session. Use this when
+ // favoring consistent scoring over lower latency. Can adversely affect performance.
+ //
+ // Whether to use sticky sessions, which can help getting more consistent results.
+ // When using sticky sessions, a best-effort attempt will be made to target the same replica set.
+ // Be wary that reusing the same replica repeatedly can interfere with the load balancing of
+ // the requests across replicas and adversely affect the performance of the search service.
+ //
+ // See https://learn.microsoft.com/rest/api/searchservice/documents/search-post?view=rest-searchservice-2024-07-01&tabs=HTTP#request-body
+ "UseStickySessions": false
+ },
+ "AzureBlobs": {
+ // "ConnectionString" or "AzureIdentity". For other options see .
+ // AzureIdentity: use automatic AAD authentication mechanism. You can test locally
+ // using the env vars AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET.
+ "Auth": "AzureIdentity",
+ // Azure Storage account name, required when using AzureIdentity auth
+ // Note: you can use an env var 'KernelMemory__Services__AzureBlobs__Account' to set this
+ "Account": "",
+ // Container where to create directories and upload files
+ "Container": "smemory",
+ // Required when Auth == ConnectionString
+ // Note: you can use an env var 'KernelMemory__Services__AzureBlobs__ConnectionString' to set this
+ "ConnectionString": "",
+ // Setting used only for country clouds
+ "EndpointSuffix": "core.windows.net"
+ },
+ "AzureOpenAIEmbedding": {
+ // "ApiKey" or "AzureIdentity"
+ // AzureIdentity: use automatic AAD authentication mechanism. You can test locally
+ // using the env vars AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET.
+ "Auth": "AzureIdentity",
+ "Endpoint": "https://<...>.openai.azure.com/",
+ "APIKey": "",
+ "Deployment": "",
+ // The max number of tokens supported by model deployed
+ // See https://learn.microsoft.com/azure/ai-services/openai/concepts/models
+ "MaxTokenTotal": 8191,
+ // The number of dimensions output embeddings should have.
+ // Only supported in "text-embedding-3" and later models developed with
+ // MRL, see https://arxiv.org/abs/2205.13147
+ "EmbeddingDimensions": null,
+ // How many embeddings to calculate in parallel. The max value depends on
+ // the model and deployment in use.
+ // See also https://learn.microsoft.com/azure/ai-services/openai/reference#embeddings
+ "MaxEmbeddingBatchSize": 10,
+ // How many times to retry in case of throttling.
+ "MaxRetries": 10
+ },
+ "AzureOpenAIText": {
+ // "ApiKey" or "AzureIdentity"
+ // AzureIdentity: use automatic AAD authentication mechanism. You can test locally
+ // using the env vars AZURE_TENANT_ID, AZURE_CLIENT_ID, AZURE_CLIENT_SECRET.
+ "Auth": "AzureIdentity",
+ "Endpoint": "https://<...>.openai.azure.com/",
+ "APIKey": "",
+ "Deployment": "",
+ // The max number of tokens supported by model deployed
+ // See https://learn.microsoft.com/azure/ai-services/openai/concepts/models
+ "MaxTokenTotal": 16384,
+ // "ChatCompletion" or "TextCompletion"
+ "APIType": "ChatCompletion",
+ // How many times to retry in case of throttling.
+ "MaxRetries": 10
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/examples/006-dotnet-serverless-azure/file4-KM-Readme.pdf b/examples/006-dotnet-serverless-azure/file4-KM-Readme.pdf
new file mode 100644
index 000000000..1cba9d854
Binary files /dev/null and b/examples/006-dotnet-serverless-azure/file4-KM-Readme.pdf differ
diff --git a/examples/006-dotnet-serverless-azure/file6-ANWC-image.jpg b/examples/006-dotnet-serverless-azure/file6-ANWC-image.jpg
new file mode 100644
index 000000000..066896fc8
Binary files /dev/null and b/examples/006-dotnet-serverless-azure/file6-ANWC-image.jpg differ
diff --git a/examples/006-dotnet-serverless-azure/run.cmd b/examples/006-dotnet-serverless-azure/run.cmd
new file mode 100644
index 000000000..22eb3fcac
--- /dev/null
+++ b/examples/006-dotnet-serverless-azure/run.cmd
@@ -0,0 +1,5 @@
+@echo off
+
+dotnet restore
+dotnet build
+cmd /C "set ASPNETCORE_ENVIRONMENT=Development && dotnet run"
diff --git a/examples/002-dotnet-Serverless/setup.sh b/examples/006-dotnet-serverless-azure/run.sh
similarity index 70%
rename from examples/002-dotnet-Serverless/setup.sh
rename to examples/006-dotnet-serverless-azure/run.sh
index 0ec8f85ae..d8c46fb7d 100755
--- a/examples/002-dotnet-Serverless/setup.sh
+++ b/examples/006-dotnet-serverless-azure/run.sh
@@ -6,5 +6,5 @@ cd "$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/"
dotnet restore
dotnet build
-dotnet run setup
+ASPNETCORE_ENVIRONMENT=Development dotnet run
diff --git a/examples/README.md b/examples/README.md
index 375a6ba66..933033fde 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -5,8 +5,9 @@ Some examples about how to use Kernel Memory.
1. [Collection of Jupyter notebooks with various scenarios](000-notebooks)
2. [Using Kernel Memory web service to upload documents and answer questions](001-dotnet-WebClient)
3. [Importing files and asking question without running the service (serverless mode)](002-dotnet-Serverless)
-4. [Using KM Plugin for Semantic Kernel](003-dotnet-SemanticKernel-plugin)
-5. Customizations
+4. [Kernel Memory RAG with Azure services](006-dotnet-serverless-azure)
+5. [Using KM Plugin for Semantic Kernel](003-dotnet-SemanticKernel-plugin)
+6. Customizations
* [Processing files with custom logic (custom handlers) in serverless mode](004-dotnet-serverless-custom-pipeline)
* [Processing files with custom logic (custom handlers) in asynchronous mode](005-dotnet-AsyncMemoryCustomPipeline)
* [Customizing RAG and summarization prompts](101-dotnet-custom-Prompts)
@@ -16,22 +17,22 @@ Some examples about how to use Kernel Memory.
* [Using a custom web scraper to fetch web pages](109-dotnet-custom-webscraper)
* [Writing and using a custom ingestion handler](201-dotnet-serverless-custom-handler)
* [Using Context Parameters to customize RAG prompt during a request](209-dotnet-using-context-overrides)
-6. Local models and external connectors
+7. Local models and external connectors
* [Using custom LLMs](104-dotnet-custom-LLM)
* [Using local LLMs with Ollama](212-dotnet-ollama)
* [Using local LLMs with llama.cpp via LlamaSharp](105-dotnet-serverless-llamasharp)
* [Using local models with LM Studio](208-dotnet-lmstudio)
* [Using Semantic Kernel LLM connectors](107-dotnet-SemanticKernel-TextCompletion)
* [Generating answers with Anthropic LLMs](110-dotnet-anthropic)
-7. [Upload files and ask questions from command line using curl](006-curl-calling-webservice)
-8. [Summarizing documents, using synthetic memories](106-dotnet-retrieve-synthetics)
-9. [Hybrid Search with Azure AI Search](111-dotnet-azure-ai-hybrid-search)
-10. [Running a single asynchronous pipeline handler as a standalone service](202-dotnet-custom-handler-as-a-service)
-11. [Integrating Memory with ASP.NET applications and controllers](204-dotnet-ASP.NET-MVC-integration)
-12. [Sample code showing how to extract text from files](205-dotnet-extract-text-from-docs)
-13. [.NET configuration and logging](206-dotnet-configuration-and-logging)
-14. [Expanding chunks retrieving adjacent partitions](207-dotnet-expanding-chunks-on-retrieval)
-15. [Creating a Memory instance without KernelMemoryBuilder](210-KM-without-builder)
-16. [Intent Detection](211-dotnet-WebClient-Intent-Detection)
-17. [Fetching data from Discord](301-discord-test-application)
-18. [Test project using KM package from nuget.org](203-dotnet-using-KM-nuget)
+8. [Upload files and ask questions from command line using curl](006-curl-calling-webservice)
+9. [Summarizing documents, using synthetic memories](106-dotnet-retrieve-synthetics)
+10. [Hybrid Search with Azure AI Search](111-dotnet-azure-ai-hybrid-search)
+11. [Running a single asynchronous pipeline handler as a standalone service](202-dotnet-custom-handler-as-a-service)
+12. [Integrating Memory with ASP.NET applications and controllers](204-dotnet-ASP.NET-MVC-integration)
+13. [Sample code showing how to extract text from files](205-dotnet-extract-text-from-docs)
+14. [.NET configuration and logging](206-dotnet-configuration-and-logging)
+15. [Expanding chunks retrieving adjacent partitions](207-dotnet-expanding-chunks-on-retrieval)
+16. [Creating a Memory instance without KernelMemoryBuilder](210-KM-without-builder)
+17. [Intent Detection](211-dotnet-WebClient-Intent-Detection)
+18. [Fetching data from Discord](301-discord-test-application)
+19. [Test project using KM package from nuget.org](203-dotnet-using-KM-nuget)
diff --git a/service/Core/Pipeline/BaseOrchestrator.cs b/service/Core/Pipeline/BaseOrchestrator.cs
index 72aba2dee..0e4a36f4a 100644
--- a/service/Core/Pipeline/BaseOrchestrator.cs
+++ b/service/Core/Pipeline/BaseOrchestrator.cs
@@ -215,11 +215,21 @@ public async Task IsDocumentReadyAsync(string index, string documentId, Ca
try
{
+ this.Log.LogDebug("Checking if document {Id} on index {Index} is ready", documentId, index);
DataPipeline? pipeline = await this.ReadPipelineStatusAsync(index: index, documentId, cancellationToken).ConfigureAwait(false);
- return pipeline != null && pipeline.Complete && pipeline.Files.Count > 0;
+
+ if (pipeline == null)
+ {
+ this.Log.LogWarning("Document {Id} on index {Index} is not ready, pipeline is NULL", documentId, index);
+ return false;
+ }
+
+ this.Log.LogDebug("Document {Id} on index {Index}, Complete = {Complete}, Files Count = {Count}", documentId, index, pipeline.Complete, pipeline.Files.Count);
+ return pipeline.Complete && pipeline.Files.Count > 0;
}
catch (PipelineNotFoundException)
{
+ this.Log.LogWarning("Document {Id} on index {Index} not found", documentId, index);
return false;
}
}